利用ffmpeg4.3.2解码USB摄像头uvc JPEG视频流

139 阅读2分钟

基于 libuvc 对某款 USB 摄像头采集视频流时,其视频格式为 JPEG(MJPEG)。在用 SDL2 渲染前,需要先将其转成 yuv420p 格式:先结合 FFmpeg 中的 avcodec_decode_video2 将 JPEG 帧解码,再使用 sws_scale 将解码结果转为 yuv420p,最后使用 SDL2 的 SDL_UpdateYUVTexture 完成渲染。

image.png

JpegDecoder.h(注意:下方 .cpp 中以 `#include "jpegdecoder.h"` 引用,在区分大小写的文件系统上,头文件实际命名需与 include 的大小写一致)

#ifndef JPEGDECODER_H
#define JPEGDECODER_H
#ifdef __cplusplus
extern "C" {
#endif
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/imgutils.h"
#include "libswresample/swresample.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include <libavutil/timestamp.h>
#ifdef __cplusplus
}
#endif

// Decodes MJPEG frames (e.g. from a libuvc USB camera) with FFmpeg and
// converts them to YUV420P for rendering (e.g. SDL_UpdateYUVTexture).
// Usage: prepareFmpeg() once, decodeJpeg() per frame, getAVFrameBuffer()
// to fetch the converted frame, destroyFfmpeg() on shutdown.
class JpegDecoder
{
private:
    AVCodec* codec;                       // MJPEG decoder (owned by FFmpeg, never freed by us)
    AVCodecContext* codecContext;         // decoder context; freed in destroyFfmpeg()
    AVFrame* yuvFrameBuf;                 // raw decoder output frame
    AVPacket* jpegPacket;                 // reused packet; data points at the caller's JPEG buffer
    struct SwsContext * frame_sws_context;// scaler: decoder pix_fmt -> YUV420P, created lazily in decodeJpeg()
    AVFrame* yuvFrameScale;               // YUV420P result; planes backed by an av_malloc'd buffer
    FILE *fp_yuv;                         // debug-only raw YUV dump file (see #if 0 block in decodeJpeg)
    int frameCount;                       // debug-only dumped-frame counter
    int scaleWidth;                       // NOTE(review): set nowhere in this file — presumably a target size; confirm
    int scaleHeight;                      // NOTE(review): same as scaleWidth

public:
    JpegDecoder();
    // Creates and opens the MJPEG decoder. width/height are the expected
    // camera frame dimensions.
    void prepareFmpeg(int width, int height);
    // Decodes one complete JPEG image and converts it to YUV420P.
    void decodeJpeg(unsigned char* jpeg,int size);
    // Returns the latest YUV420P frame (NULL before the first decode).
    AVFrame* getAVFrameBuffer();
    void destroyFfmpeg();
    // Despite the name, this releases the scaled frame so it is re-created
    // on the next decodeJpeg() call.
    void sw_scale(int w, int h);
};

#endif // JPEGDECODER_H

JpegDecoder.cpp

#include "jpegdecoder.h"
#include <QDebug>

JpegDecoder::JpegDecoder()
{
    // Initialize every pointer member. The original left codec, codecContext,
    // yuvFrameBuf, jpegPacket, frame_sws_context and fp_yuv uninitialized;
    // fp_yuv in particular is compared against NULL in decodeJpeg()'s debug
    // path, which is undefined behavior on an indeterminate value.
    codec = nullptr;
    codecContext = nullptr;
    yuvFrameBuf = nullptr;
    jpegPacket = nullptr;
    frame_sws_context = nullptr;
    yuvFrameScale = NULL;
    fp_yuv = NULL;
    frameCount = 0;
    scaleWidth = scaleHeight = 0;
}

void JpegDecoder::prepareFmpeg(int width, int height)
{
    // Set up an MJPEG decoder; each USB frame is a standalone JPEG image.
    // avcodec_register_all() was dropped: codec registration is automatic
    // since FFmpeg 4.0 and the call is deprecated in the targeted 4.3.2.
    codec = avcodec_find_decoder(AV_CODEC_ID_MJPEG);
    if (!codec) {
        qDebug() << "MJPEG decoder not found";
        return;
    }
    codecContext = avcodec_alloc_context3(codec);
    if (!codecContext) {
        qDebug() << "failed to allocate AVCodecContext";
        return;
    }
    // Hint the expected frame size (previously the parameters were unused);
    // the MJPEG decoder still reads the real size from each JPEG header.
    codecContext->width = width;
    codecContext->height = height;
    if (avcodec_open2(codecContext, codec, nullptr) < 0) {
        qDebug() << "avcodec_open2 failed";
        return;
    }
    yuvFrameBuf = av_frame_alloc();
    // av_packet_alloc() returns an initialized packet; the deprecated
    // av_init_packet() call is unnecessary and was removed.
    jpegPacket = av_packet_alloc();
    qDebug() << codecContext->pix_fmt << codecContext->width;
}

void JpegDecoder::decodeJpeg(unsigned char *jpeg, int size)
{
    jpegPacket->data = jpeg;
    jpegPacket->size = size;
    int got_picture_ptr = 0;
    avcodec_decode_video2(codecContext,yuvFrameBuf,&got_picture_ptr,jpegPacket);
    if(yuvFrameScale == NULL){
        yuvFrameScale = av_frame_alloc();
        unsigned char *out_buffer = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
                                                                         codecContext->width,
                                                                         codecContext->height, 1));
        av_image_fill_arrays(yuvFrameScale->data, yuvFrameScale->linesize, out_buffer,
            AV_PIX_FMT_YUV420P, codecContext->width, codecContext->height, 1);
        frame_sws_context = sws_getContext(codecContext->width, codecContext->height, codecContext->pix_fmt,
            codecContext->width, codecContext->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
    }
    if (got_picture_ptr){
        sws_scale(frame_sws_context, (const unsigned char* const*)yuvFrameBuf->data, yuvFrameBuf->linesize, 0, codecContext->height,
                   yuvFrameScale->data, yuvFrameScale->linesize);
#if 0
      if(frameCount < 300){
          if(fp_yuv == NULL){
              fp_yuv = fopen("uvc.yuv", "wb+");
          }
          frameCount++;
          int y_size = frame->width * frame->height;
          fwrite(yuvFrameScale->data[0], 1, y_size, fp_yuv);		//Y
          fwrite(yuvFrameScale->data[1], 1, y_size / 4, fp_yuv);	//U
          fwrite(yuvFrameScale->data[2], 1, y_size / 4, fp_yuv);	//V
      } else {
          if(fp_yuv){
              fclose(fp_yuv);
              fp_yuv = NULL;
          }
      }
#endif
    }
}

// Accessor for the most recent YUV420P frame.
// Returns NULL until decodeJpeg() has produced at least one frame.
AVFrame *JpegDecoder::getAVFrameBuffer()
{
    AVFrame *scaled = yuvFrameScale;
    return scaled;
}

void JpegDecoder::sw_scale(int w, int h){
    av_frame_free(&yuvFrameScale);
    yuvFrameScale = NULL;
}

void JpegDecoder::destroyFfmpeg()
{
    av_free_packet(jpegPacket);
    av_frame_free(&yuvFrameBuf);
    avcodec_close(codecContext);
    avcodec_free_context(&codecContext);
}

image.png