#include "pch.h"
#include <iostream>
#ifdef _MSC_VER
#include <Windows.h>
#pragma warning(disable : 4819)
#pragma warning(disable : 4996)
#endif
#include <stdio.h>
#include <assert.h>
#include <math.h>
#include <time.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
}
#ifdef __MINGW32__
#undef main /* Prevents SDL from overriding main() */
#endif
#ifdef _MSC_VER
#undef main
#endif
#ifdef _MSC_VER
#pragma warning(disable : 4244)
#pragma comment(lib, "avformat")
#pragma comment(lib, "avcodec")
#pragma comment(lib, "avutil")
#pragma comment(lib, "swscale")
#endif
int FFmpeg_Code01(int argc, char * argv[]);
int get_error_msg(int code, std::string& msg) {
    std::string str_err_msg;
    str_err_msg.resize(256);
    int ret = av_strerror(code, (char*)str_err_msg.data(), str_err_msg.length());
    msg = str_err_msg.c_str();
    return ret;
}
int main(int argc, char * argv[])
{
    int ret = 0;
    do
    {
        ret = FFmpeg_Code01(argc, argv);
        printf("Press Enter to run the next loop.\n");
        getchar();
    } while (true);
    return ret;
}
int CreateAndFillFrame(AVFrame** ppFrameYUV, int width, int height, const uint8_t *pBuf)
{
    int ret = -1;
    *ppFrameYUV = av_frame_alloc();
    std::string errorMessage;
    do
    {
        ret = av_image_fill_arrays((*ppFrameYUV)->data, (*ppFrameYUV)->linesize, pBuf, AV_PIX_FMT_YUV420P, width, height, 32);
        if (ret < 0) {
            get_error_msg(ret, errorMessage);
            printf("av_image_fill_arrays failed, %s\n", errorMessage.c_str());
            break;
        }
        ret = 0;
    } while (false);
    return ret;
}
int FFmpeg_Code01(int argc, char * argv[])
{
    int ret = -1;
    int cnt = 0;
    AVFormatContext* pFormatCtx = NULL;
    AVDictionary* pDictionary = NULL;
    AVPacket packet;
    AVFrame *frame = NULL, *frameYUV = NULL;
    uint8_t *pBuf = NULL;
    struct SwsContext* swsCtx = NULL;
    bool videoCodecOpened = false;
    AVCodecContext* vCodecCtx = NULL;
    const char* path = argv[1];
    std::string errorMessage;
    do
    {
        // Open the input and read the stream information.
        ret = avformat_open_input(&pFormatCtx, path, NULL, &pDictionary);
        if (ret != 0) {
            get_error_msg(ret, errorMessage);
            printf("avformat_open_input failed, %s\n", errorMessage.c_str());
            break;
        }
        int cnt = pFormatCtx->nb_streams;
        ret = avformat_find_stream_info(pFormatCtx, NULL);
        if (ret != 0) {
            get_error_msg(ret, errorMessage);
            printf("avformat_find_stream_info failed, %s\n", errorMessage.c_str());
            break;
        }
        // Find the video stream and open its decoder (older FFmpeg API: AVStream::codec).
        int audioStreamIdx = -1, videoStreamIdx = -1;
        AVCodec* vCodec = NULL;
        videoStreamIdx = av_find_best_stream(pFormatCtx, AVMediaType::AVMEDIA_TYPE_VIDEO, -1, -1, &vCodec, 0);
        if (videoStreamIdx < 0) {
            printf("av_find_best_stream failed\n");
            break;
        }
        vCodecCtx = pFormatCtx->streams[videoStreamIdx]->codec;
        if (!vCodec) {
            vCodec = avcodec_find_decoder(vCodecCtx->codec_id);
            if (!vCodec) {
                printf("avcodec_find_decoder failed, codec_id = [ %d ]\n", vCodecCtx->codec_id);
                break;
            }
        }
        ret = avcodec_open2(vCodecCtx, vCodec, NULL);
        if (ret < 0) {
            get_error_msg(ret, errorMessage);
            printf("avcodec_open2 failed, %s\n", errorMessage.c_str());
            break;
        }
        videoCodecOpened = true;
        // Allocate the frames, the YUV420P destination buffer and the scaler.
        frame = av_frame_alloc();
        frameYUV = av_frame_alloc();
        int width = vCodecCtx->width;
        int height = vCodecCtx->height;
        int bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, width, height, 32);
        pBuf = (uint8_t *)av_malloc(bytes * sizeof(uint8_t));
        if (!pBuf) {
            printf("av_malloc failed.\n");
            break;
        }
        ret = av_image_fill_arrays(frameYUV->data, frameYUV->linesize, pBuf, AV_PIX_FMT_YUV420P, width, height, 32);
        if (ret < 0) {
            get_error_msg(ret, errorMessage);
            printf("av_image_fill_arrays failed, %s\n", errorMessage.c_str());
            break;
        }
        swsCtx = sws_getContext(width, height, vCodecCtx->pix_fmt, width, height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
        if (!swsCtx) {
            printf("sws_getContext failed.\n");
            break;
        }
        // Demux packets, decode the video ones and convert each picture to YUV420P.
        int hasErr = 0;
        int gotPic = 0;
        int cnt_pic_packet = 0;
        int cnt_frame = 0;
        av_init_packet(&packet);
        while (1)
        {
            int readret = av_read_frame(pFormatCtx, &packet);
            if (readret < 0) {
                if (AVERROR_EOF == readret) {
                    printf("end of file.\n");
                    break;
                }
                get_error_msg(readret, errorMessage);
                break;
            }
            if (packet.stream_index == videoStreamIdx) {
                ret = avcodec_decode_video2(vCodecCtx, frame, &gotPic, &packet);
                if (ret < 0) {
                    hasErr = 1;
                    printf("avcodec_decode_video2 failed.\n");
                    break;
                }
                cnt_pic_packet++;
                if (gotPic) {
                    cnt_pic_packet = 0;
                    int sliceHeight = sws_scale(swsCtx, (const uint8_t**)frame->data, frame->linesize,
                                                0, height,
                                                frameYUV->data, frameYUV->linesize);
                    cnt_frame++;
                    // Throw both frames away and rebuild them for the next picture.
                    av_frame_free(&frame);
                    frame = av_frame_alloc();
                    av_frame_free(&frameYUV);
                    ret = CreateAndFillFrame(&frameYUV, width, height, pBuf);
                    if (ret < 0) {
                        hasErr = 1;
                        break;
                    }
                }
            } // video stream data
            av_packet_unref(&packet);
            av_init_packet(&packet);
        } // while
        if (hasErr) {
            break;
        }
        ret = 0;
    } while (false);
    // Clean up everything that was allocated above.
    if (swsCtx) {
        sws_freeContext(swsCtx);
    }
    if (pBuf) {
        av_freep(&pBuf);
    }
    if (frame) {
        av_frame_free(&frame);
    }
    if (frameYUV) {
        av_frame_free(&frameYUV);
    }
    if (videoCodecOpened) {
        avcodec_close(vCodecCtx);
    }
    if (pFormatCtx)
        avformat_close_input(&pFormatCtx);
    return ret;
}

Inside the while loop of FFmpeg_Code01, try adding av_packet_unref(&packet) before every break and see if that helps.
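
A minimal sketch of that suggestion, using the same old decode API as the code above. The helper name drain_video_stream and its parameters are made up here; only the unref-before-every-break pattern matters (calling av_packet_unref on an empty, initialized packet is harmless):

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

static int drain_video_stream(AVFormatContext* fmtCtx, AVCodecContext* codecCtx,
                              int videoStreamIdx, AVFrame* frame)
{
    AVPacket packet;
    av_init_packet(&packet);
    int ret = 0;
    while (1) {
        ret = av_read_frame(fmtCtx, &packet);
        if (ret < 0) {
            if (ret == AVERROR_EOF)
                ret = 0;
            av_packet_unref(&packet);      // release before this break too
            break;
        }
        if (packet.stream_index == videoStreamIdx) {
            int gotPic = 0;
            int decret = avcodec_decode_video2(codecCtx, frame, &gotPic, &packet);
            if (decret < 0) {
                av_packet_unref(&packet);  // release before the error break
                ret = decret;
                break;
            }
            // ... consume the picture here when gotPic != 0 ...
        }
        av_packet_unref(&packet);          // normal path: one unref per av_read_frame
    }
    return ret;
}
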
Judging from the code, it looks like the problem is pDictionary.
But I am not sure about that API.
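
Regarding that API: avformat_open_input() consumes the options it recognizes and, on return, leaves the unmatched entries in the same dictionary, which the caller is expected to free with av_dict_free(); FFmpeg_Code01 above never frees it. A small sketch of that, with a hypothetical wrapper name open_input_checked:

extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/dict.h>
}
#include <stdio.h>

static int open_input_checked(AVFormatContext** fmtCtx, const char* path)
{
    AVDictionary* opts = NULL;
    // Options could be set here first, e.g.:
    // av_dict_set(&opts, "probesize", "5000000", 0);
    int ret = avformat_open_input(fmtCtx, path, NULL, &opts);

    // Whatever the demuxer did not recognize is still in opts after the call.
    AVDictionaryEntry* entry = NULL;
    while ((entry = av_dict_get(opts, "", entry, AV_DICT_IGNORE_SUFFIX)) != NULL)
        printf("option '%s' was not used by the demuxer\n", entry->key);

    av_dict_free(&opts);   // free the dictionary on every path, success or failure
    return ret;
}
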