ffmpeg命令从mp4中提取rgb24
"select=gt(n,0)*lt(n,100)":选择前100张图片
ffmpeg -i input.mp4 -vf "select=gt(n\,0)*lt(n\,100)" -vcodec rawvideo -pix_fmt rgb24 output.rgb
ffplay命令播放rgb24
ffplay -f rawvideo -pixel_format rgb24 -video_size 1280x536 output.rgb
使用ffmpeg库从mp4提取rgb24
#include <inttypes.h>
#include <stdio.h>

#include <libgen.h>
#include <sys/stat.h>

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
int main(int argc, char **argv)
{
AVFormatContext *ifmt_ctx = NULL;
AVCodecContext *codec_ctx = NULL;
AVCodecParameters *codec_para = NULL;
AVPacket *pkt = NULL;
AVFrame *frame = NULL;
AVFrame *out_frame = NULL;
struct SwsContext *sws_ctx = NULL;
char *in_filename, *out_filename;
int ret, i;
int video_stream_index = -1;
int cnt = 0;
int file_index = 0;
char out_file[256];
in_filename = "../input.mp4";
out_filename = "frame_rgb/%dx%d_%d.rgb";
// 创建目录
mkdir(dirname(out_filename), 0777);
printf("out_filename: %s\n", out_filename);
pkt = av_packet_alloc();
if (!pkt) {
fprintf(stderr, "Could not allocate AVPacket\n");
goto end;
}
frame = av_frame_alloc();
if (!frame) {
fprintf(stderr, "Could not allocate AVFrame\n");
goto end;
}
out_frame = av_frame_alloc();
if (!out_frame) {
fprintf(stderr, "Could not allocate AVFrame\n");
goto end;
}
if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0) {
fprintf(stderr, "Could not open input file '%s'", in_filename);
goto end;
}
if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
fprintf(stderr, "Failed to retrieve input stream information");
goto end;
}
av_dump_format(ifmt_ctx, 0, in_filename, 0);
for (i = 0; i < ifmt_ctx->nb_streams; i++) {
if (ifmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
video_stream_index = i;
break;
}
}
codec_para = ifmt_ctx->streams[video_stream_index]->codecpar;
const AVCodec *codec = avcodec_find_decoder(codec_para->codec_id);
if (codec == NULL) {
fprintf(stderr, "can not find video decoder\n");
goto end;
}
codec_ctx = avcodec_alloc_context3(codec);
if (codec_ctx == NULL) {
fprintf(stderr, "can not alloc context\n");
goto end;
}
avcodec_parameters_to_context(codec_ctx, codec_para);
ret = avcodec_open2(codec_ctx, NULL, NULL);
if (ret < 0) {
fprintf(stderr, "can not open context\n");
goto end;
}
int w = codec_ctx->width;
int h = codec_ctx->height;
printf("video_stream_index: %d\n", video_stream_index);
printf("width: %d\n", w);
printf("height: %d\n", h);
printf("nb_frames: %lld\n", ifmt_ctx->streams[video_stream_index]->nb_frames);
printf("avg_frame_rate.num: %d\n", ifmt_ctx->streams[video_stream_index]->avg_frame_rate.num);
printf("avg_frame_rate.den: %d\n", ifmt_ctx->streams[video_stream_index]->avg_frame_rate.den);
// 平均帧率
int avg_frame_rate = ifmt_ctx->streams[video_stream_index]->avg_frame_rate.num / ifmt_ctx->streams[video_stream_index]->avg_frame_rate.den;
printf("avg_frame_rate: %d\n", avg_frame_rate);
sws_ctx = sws_getContext(w, h, codec_para->format,
w, h, AV_PIX_FMT_RGB24,
SWS_BICUBIC, NULL, NULL, NULL);
if (sws_ctx == NULL) {
fprintf(stderr, "can not get SwsContext\n");
goto end;
}
// 设置帧参数
out_frame->format = AV_PIX_FMT_RGB24;
out_frame->width = codec_ctx->width;
out_frame->height = codec_ctx->height;
// 分配帧数据缓冲区
ret = av_frame_get_buffer(out_frame, 0);
if (ret < 0)
{
printf("av_frame_get_buffer failed\n");
goto end;
}
printf("out_frame->width: %d\n", out_frame->width);
printf("out_frame->height: %d\n", out_frame->height);
printf("out_frame->data: %p\n", out_frame->data);
printf("out_frame->data[0]: %p\n", out_frame->data[0]);
printf("out_frame->data[1]: %p\n", out_frame->data[1]);
printf("out_frame->data[2]: %p\n", out_frame->data[2]);
printf("out_frame->linesize[0]: %d\n", out_frame->linesize[0]);
printf("out_frame->linesize[1]: %d\n", out_frame->linesize[1]);
printf("out_frame->linesize[2]: %d\n", out_frame->linesize[2]);
while (1) {
AVStream *in_stream;
ret = av_read_frame(ifmt_ctx, pkt);
if (ret < 0)
break;
in_stream = ifmt_ctx->streams[pkt->stream_index];
if (pkt->stream_index != video_stream_index) {
av_packet_unref(pkt);
continue;
}
if (avcodec_send_packet(codec_ctx, pkt) == 0) {
while (avcodec_receive_frame(codec_ctx, frame) == 0) {
// 由于frame数据量太大,所以只每隔1秒保存一张yuv420p的图片
if (cnt % avg_frame_rate == 0) {
sws_scale(sws_ctx, (const uint8_t**) frame->data, frame->linesize, 0, frame->height, out_frame->data, out_frame->linesize);
snprintf(out_file, sizeof(out_file), out_filename, w, h, file_index++);
printf("out_file: %s\n", out_file);
FILE * f = fopen(out_file, "wb");
fwrite(out_frame->data[0], 1, w*h*3, f); // rgb24
fclose(f);
}
cnt++;
av_frame_unref(frame);
}
}
av_packet_unref(pkt);
}
// 读取codec_ctx剩余的帧
if (avcodec_send_packet(codec_ctx, NULL) == 0) {
while (avcodec_receive_frame(codec_ctx, frame) == 0) {
// 由于frame数据量太大,所以只每隔1秒保存一张yuv420p的图片
if (cnt % avg_frame_rate == 0) {
sws_scale(sws_ctx, (const uint8_t**) frame->data, frame->linesize, 0, frame->height, out_frame->data, out_frame->linesize);
snprintf(out_file, sizeof(out_file), out_filename, w, h, file_index++);
printf("out_file: %s\n", out_file);
FILE * f = fopen(out_file, "wb");
fwrite(out_frame->data[0], 1, w*h*3, f); // rgb24
fclose(f);
}
cnt++;
av_frame_unref(frame);
}
}
printf("Number of frames actually read: %d\n", cnt);
end:
sws_freeContext(sws_ctx);
av_packet_free(&pkt);
av_frame_free(&frame);
av_frame_free(&out_frame);
avcodec_free_context(&codec_ctx);
avformat_close_input(&ifmt_ctx);
return 0;
}