以下代码实现了:从 1280*544.mp4 视频中提取 100 帧(跳过开头若干帧),并将每一帧缩小为源帧宽高的 1/4,然后将处理过后的 100 帧横向拼接为一帧,输出为 YUV 文件。
#include <stdio.h>
#include <string.h>  /* memset */
#define __STDC_CONSTANT_MACROS
#define FRAMEWITH 320
#define FRAMEHEIGTH 136
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
};
#define NUM 100
int main(int argc, char* argv[])
{
AVFormatContext *pFormatCtx;
int i, videoindex;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameYUV[NUM], *pDstFrame;
uint8_t *out_buffer[NUM];
AVPacket *packet;
int y_size;
int ret, got_picture;
struct SwsContext *img_convert_ctx;
//输入文件路径
char filepath[]="1280*544.mp4";
int frame_cnt;
av_register_all();
avformat_network_init();
pFormatCtx = avformat_alloc_context();
if(avformat_open_input(&pFormatCtx,filepath,NULL,NULL)!=0)
{
printf("Couldn't open input stream.\n");
return -1;
}
if(avformat_find_stream_info(pFormatCtx,NULL)<0)
{
printf("Couldn't find stream information.\n");
return -1;
}
videoindex = -1;
for(i = 0; i < pFormatCtx->nb_streams; i++)
{
if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoindex = i;
break;
}
}
if(videoindex == -1)
{
printf("Didn't find a video stream.\n");
return -1;
}
pCodecCtx = pFormatCtx->streams[videoindex]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL)
{
printf("Codec not found.\n");
return -1;
}
if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
{
printf("Could not open codec.\n");
return -1;
}
/*
* 在此处添加输出视频信息的代码
* 取自于pFormatCtx,使用fprintf()
*/
pFrame = av_frame_alloc();
for(int i = 0; i < NUM; i++)
{
pFrameYUV[i] = av_frame_alloc();
}
for(int i = 0; i < NUM; i++)
{
out_buffer[i] = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, FRAMEWITH, FRAMEHEIGTH));
avpicture_fill((AVPicture *)pFrameYUV[i], out_buffer[i], AV_PIX_FMT_YUV420P, FRAMEWITH, FRAMEHEIGTH);
}
pDstFrame = av_frame_alloc();
int nDstSize = avpicture_get_size(AV_PIX_FMT_YUV420P, FRAMEWITH * NUM, FRAMEHEIGTH);
uint8_t *dstbuf = new uint8_t[nDstSize];
avpicture_fill((AVPicture*)pDstFrame, dstbuf, AV_PIX_FMT_YUV420P, FRAMEWITH * NUM, FRAMEHEIGTH);
pDstFrame->width = FRAMEWITH * NUM;
pDstFrame->height = FRAMEHEIGTH;
pDstFrame->format = AV_PIX_FMT_YUV420P;
//将预先分配的AVFrame图像背景数据设置为黑色背景
memset(pDstFrame->data[0], 0, FRAMEWITH * FRAMEHEIGTH * NUM);
memset(pDstFrame->data[1], 0x80, FRAMEWITH * FRAMEHEIGTH / 4 * NUM);
memset(pDstFrame->data[2], 0x80, FRAMEWITH * FRAMEHEIGTH / 4 * NUM);
packet=(AVPacket *)av_malloc(sizeof(AVPacket));
printf("--------------- File Information ----------------\n");
av_dump_format(pFormatCtx, 0, filepath, 0);
printf("-------------------------------------------------\n");
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
FRAMEWITH, FRAMEHEIGTH, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
int count = 0;
frame_cnt = 0;
int make = 0;
FILE *fp_yuv420 = fopen("test.yuv", "wb+");
if(NULL == fp_yuv420)
{
printf("test.yuv open error!\n");
av_frame_free(&pDstFrame);
av_free_packet(packet);
return -1;
}
while((av_read_frame(pFormatCtx, packet) >= 0) && (make < (NUM + 10)))
{
if(packet->stream_index == videoindex)
{
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if(ret < 0)
{
printf("Decode Error.\n");
fclose(fp_yuv420);
av_free_packet(packet);
return -1;
}
if(got_picture && (make > 9))
{
sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
pFrameYUV[count]->data, pFrameYUV[count]->linesize);
printf("Decoded frame index: %d\n",frame_cnt);
count++;
}
make++;
av_free_packet(packet);
}
}
/* 如果需要直接写为YUV文件 */
for (int i = 0; i < FRAMEHEIGTH; i++)
{
//从pFrameYUV[]中提取数据一帧一帧写入到文件中
for(int j = 0; j < NUM; j++)
{
fwrite(pFrameYUV[j]->data[0] + i*FRAMEWITH, 1, FRAMEWITH, fp_yuv420); //Y
}
}
for (int i = 0; i < FRAMEHEIGTH / 4; i++)
{
//从pFrameYUV[]中提取数据一帧一帧写入到文件中
for(int j =0; j < NUM; j++)
{
fwrite(pFrameYUV[j]->data[1] + i * FRAMEWITH, 1, FRAMEWITH, fp_yuv420); //U
}
}
for (int i = 0; i < FRAMEHEIGTH / 4; i++)
{
for(int j =0; j < NUM; j++)
{
fwrite(pFrameYUV[j]->data[2] + i * FRAMEWITH, 1, FRAMEWITH, fp_yuv420); //V
}
}
fclose(fp_yuv420);
sws_freeContext(img_convert_ctx);
for(int i =0; i< NUM; i++)
{
av_frame_free(&pFrameYUV[i]);
}
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return 0;
}
1280_544.png
下面这张图是将 YUV 转换为图片后的效果,由于宽度是 100*320=32000 像素,图片太宽,这里只截取其中的一段。
100_320_136.png
网友评论