I ultimately went with the approach from https://blog.csdn.net/smilestone_322/article/details/7605101.
Reposted from: http://blog.sina.com.cn/s/blog_62949ff40101egbw.html
1. Go to http://ffmpeg.zeranoe.com/builds/
and download ffmpeg-20131120-git-e502783-win32-dev and ffmpeg-20131120-git-e502783-win32-shared.
2. Open VS2008 and create a console project. Copy the include and lib folders from ffmpeg-20131120-git-e502783-win32-dev into the project directory, then add them to the project's include and library search paths.
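As a minimal sketch (my addition, not from the original post), the import libraries can also be linked directly from source with MSVC's #pragma comment, assuming the .lib files from the win32-dev package are on the library search path:

/* Sketch only: pull in the FFmpeg import libraries from code (MSVC-specific).
   Assumes avcodec.lib, avformat.lib, avutil.lib and swscale.lib from the
   win32-dev package can be found on the library search path. */
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "swscale.lib")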
3. Build process:
Error 1: Cannot open include file: 'inttypes.h': No such file or directory
Fix: remove that #include and add the following code in its place:
#if defined(WIN32) && !defined(__MINGW32__) && !defined(__CYGWIN__)
# define CONFIG_WIN32
#endif
#if defined(WIN32) && !defined(__MINGW32__) && !defined(__CYGWIN__) && !defined(EMULATE_INTTYPES)
# define EMULATE_INTTYPES
#endif
#ifndef EMULATE_INTTYPES
# include <inttypes.h>
#else
typedef signed char int8_t;
typedef signed short int16_t;
typedef signed int int32_t;
typedef unsigned char uint8_t;
typedef unsigned short uint16_t;
typedef unsigned int uint32_t;
# ifdef CONFIG_WIN32
typedef signed __int64 int64_t;
typedef unsigned __int64 uint64_t;
# else
typedef signed long long int64_t;
typedef unsigned long long uint64_t;
# endif
#endif
Error 2: error C3861: 'UINT64_C': identifier not found
Fix: add the following code to common.h:
#ifndef INT64_C
#define INT64_C(c) (c ## LL)
#define UINT64_C(c) (c ## ULL)
#endif
Error 3: the build stops at the '#error missing -D__STDC_CONSTANT_MACROS' check.
Fix: comment out these lines:
//#if defined(__cplusplus) && !defined(__STDC_CONSTANT_MACROS) && !defined(UINT64_C)
//#error missing -D__STDC_CONSTANT_MACROS / #define __STDC_CONSTANT_MACROS
//#endif
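That #error message itself points at a less invasive alternative: on compilers that do ship stdint.h, defining __STDC_CONSTANT_MACROS before the first FFmpeg include makes INT64_C/UINT64_C available in C++ without editing the library headers. A minimal sketch (my addition, not from the original post):

/* Alternative sketch: define the macro before any FFmpeg header is included,
   instead of patching common.h. Only helps if the compiler provides stdint.h. */
#define __STDC_CONSTANT_MACROS
#ifdef __cplusplus
extern "C" {
#endif
#include "libavformat/avformat.h"
#ifdef __cplusplus
}
#endif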
4. Sample code: extract five frames from a video file and save them as images:
#include <stdio.h>
#include <stdlib.h>
#ifdef __cplusplus
extern "C" {
#endif
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#ifdef __cplusplus
}
#endif
void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame) {
    FILE *pFile;
    char szFilename[32];
    int y;

    // Open file
    sprintf(szFilename, "frame%d.ppm", iFrame);
    pFile = fopen(szFilename, "wb");
    if(pFile == NULL)
        return;

    // Write header
    fprintf(pFile, "P6\n%d %d\n255\n", width, height);

    // Write pixel data, one row of packed RGB24 per iteration
    for(y = 0; y < height; y++)
        fwrite(pFrame->data[0] + y*pFrame->linesize[0], 1, width*3, pFile);

    // Close file
    fclose(pFile);
}
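/* Note (added): a P6 PPM file is just a short text header followed by raw RGB
   triples, which is why main() below converts each decoded frame to
   PIX_FMT_RGB24 before handing it to SaveFrame(). */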
int main() {
    AVFormatContext *pFormatCtx = NULL;
    int i, videoStream;
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;
    AVFrame *pFrame;
    AVFrame *pFrameRGB;
    AVPacket packet;
    int frameFinished;
    int numBytes;
    uint8_t *buffer;
    static struct SwsContext *img_convert_ctx;
    char *filePath = "test.mp4";

    // Register all formats and codecs
    av_register_all();

    // Open video file
    if(avformat_open_input(&pFormatCtx, filePath, NULL, NULL) != 0)
        return -1; // Couldn't open file

    // Retrieve stream information
    if(av_find_stream_info(pFormatCtx) < 0)
        return -1; // Couldn't find stream information

    // Dump information about file onto standard error
    av_dump_format(pFormatCtx, 0, filePath, 0);

    // Find the first video stream
    videoStream = -1;
    for(i = 0; i < pFormatCtx->nb_streams; i++)
        if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    if(videoStream == -1)
        return -1; // Didn't find a video stream

    // Get a pointer to the codec context for the video stream
    pCodecCtx = pFormatCtx->streams[videoStream]->codec;

    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec == NULL) {
        fprintf(stderr, "Unsupported codec!\n");
        return -1; // Codec not found
    }

    // Open codec
    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
        return -1; // Could not open codec

    // Allocate video frame
    pFrame = avcodec_alloc_frame();

    // Allocate an AVFrame structure for the RGB copy
    pFrameRGB = avcodec_alloc_frame();
    if(pFrameRGB == NULL)
        return -1;

    // Determine required buffer size and allocate buffer
    numBytes = avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
    buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));

    // Assign appropriate parts of buffer to image planes in pFrameRGB
    // Note that pFrameRGB is an AVFrame, but AVFrame is a superset of AVPicture
    avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
                   pCodecCtx->width, pCodecCtx->height);

    // Read frames and save the first five frames to disk
    i = 0;
    while(av_read_frame(pFormatCtx, &packet) >= 0) {
        if(packet.stream_index == videoStream) {
            // Decode video frame
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
            if(frameFinished) {
                // Set up the software scaler (native pixel format -> RGB24)
                img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                                 pCodecCtx->width, pCodecCtx->height, PIX_FMT_RGB24,
                                                 SWS_BICUBIC, NULL, NULL, NULL);
                // Convert the image from its native format to RGB
                sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0,
                          pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
                if(++i <= 5)
                    SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, i);
            }
        }
        // Free the packet that was allocated by av_read_frame
        av_free_packet(&packet);
    }

    // Free the RGB image
    av_free(buffer);
    av_free(pFrameRGB);

    // Free the YUV frame
    av_free(pFrame);

    // Close the codec
    avcodec_close(pCodecCtx);

    // Close the video file
    av_close_input_file(pFormatCtx);

    printf("Done.\n");
    system("pause");
    return 0;
}
5. Copy all the DLLs from ffmpeg-20131120-git-e502783-win32-shared\bin into the directory containing the executable, then run the executable.
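If everything works, frame1.ppm through frame5.ppm are written to the program's working directory (the same place test.mp4 is read from); any image viewer that understands PPM can open them.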
Source: CSDN
Author: huanggang982
Link: https://blog.csdn.net/huanggang982/article/details/103665996