Android+FFmpeg+Surface视频播放

545 阅读2分钟

1. Android端的SurfaceView

//继承SurfaceView,
public class VideoView extends SurfaceView implements SurfaceHolder.Callback {
    public VideoView(Context context) {
        super(context);
    }
    ...
    public VideoView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    private SurfaceHolder holder;
    private NativeLib nativeLib;
    private void init() {
        holder = getHolder();
        holder.addCallback(this);
        nativeLib = new NativeLib();
    }


//在surfaceCreate中,在线程中 调用jni中的videoPlay方法
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        new Thread(new Runnable() {
            @Override
            public void run() {  
                nativeLib.videoPlay(holder.getSurface(),
                    "/sdcard/DCIM/Camera/lv_0_20220323141837.mp4");
            }
        }).start();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }
}



public class NativeLib {
    static {
        // Load libnativelib.so, which implements videoPlay (see nativelib.cpp).
        System.loadLibrary("nativelib");
    }
    // JNI entry point: decodes the file at inputPath with FFmpeg and renders the
    // frames into the given Surface. Blocks until playback finishes, so callers
    // must invoke it from a background thread.
    public native void videoPlay(Surface surface,String inputPath);
}

2. jni中的方法

//nativelib.cpp
//头文件引入
#include <jni.h>
#include <string>
#include "android/log.h"
#include <android/native_window.h>
#include <android/native_window_jni.h>

extern "C" {
#include "libavcodec/avcodec.h"
#include <libavformat/avformat.h>
#include "libswscale/swscale.h"
#include <libavutil/imgutils.h>
}


//工具方法: jstring转成char *
// Convert a Java String to a heap-allocated, NUL-terminated UTF-8 C string by
// calling String.getBytes("UTF-8") through JNI.
// Returns NULL for an empty string or on any JNI failure; the caller owns the
// returned buffer and must free() it.
char * jstringToChar(JNIEnv *env, jstring str) {
    char *rtn = NULL;
    jclass classString = env->FindClass("java/lang/String");
    jstring strEncode = env->NewStringUTF("UTF-8");
    jmethodID mid = env->GetMethodID(classString, "getBytes", "(Ljava/lang/String;)[B");
    jbyteArray barr = (jbyteArray) env->CallObjectMethod(str, mid, strEncode);
    if (barr != NULL) {
        jsize length = env->GetArrayLength(barr);
        // Fix: the second argument is a jboolean* out-parameter ("isCopy");
        // the original passed JNI_FALSE, which only worked because it is 0.
        jbyte *ba = env->GetByteArrayElements(barr, NULL);
        if (ba != NULL) {
            if (length > 0) {
                rtn = (char *) malloc((size_t) length + 1);
                memcpy(rtn, ba, (size_t) length);
                rtn[length] = 0;
            }
            // JNI_ABORT: we only read the bytes, no need to copy them back.
            env->ReleaseByteArrayElements(barr, ba, JNI_ABORT);
        }
        env->DeleteLocalRef(barr);
    }
    // Drop local refs explicitly: this helper runs on a long-lived worker
    // thread where local references are not reclaimed until the call returns.
    env->DeleteLocalRef(strEncode);
    env->DeleteLocalRef(classString);
    return rtn;
}


// JNI implementation of NativeLib.videoPlay: demux + decode the video stream of
// the file at inputPath with FFmpeg, convert each frame to RGBA and blit it into
// the Surface via ANativeWindow. Blocks until the file ends or an error occurs.
extern "C" JNIEXPORT void JNICALL
Java_com_vivo_finger_nativelib_NativeLib_videoPlay(JNIEnv *env, jobject,
                                                   jobject surface, jstring inputPath) {
    char *path = jstringToChar(env, inputPath);
    if (path == NULL) {
        __android_log_write(ANDROID_LOG_ERROR, "videoPlay", "empty input path");
        return;
    }

    avformat_network_init();
    AVFormatContext *avFormatContext = avformat_alloc_context();
    // Fix: the original never checked open/find results and crashed on bad input.
    if (avformat_open_input(&avFormatContext, path, NULL, NULL) != 0) {
        __android_log_write(ANDROID_LOG_ERROR, "videoPlay", "avformat_open_input failed");
        free(path);
        return;
    }
    if (avformat_find_stream_info(avFormatContext, NULL) < 0) {
        __android_log_write(ANDROID_LOG_ERROR, "videoPlay", "avformat_find_stream_info failed");
        avformat_close_input(&avFormatContext);
        free(path);
        return;
    }

    // Locate the video stream via codecpar (streams[i]->codec is deprecated and
    // was removed in FFmpeg 5; the file already uses the modern send/receive API).
    int video_index = -1;
    for (int i = 0; i < avFormatContext->nb_streams; ++i) {
        if (avFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_index = i;
            break;
        }
    }
    if (video_index < 0) {
        __android_log_write(ANDROID_LOG_ERROR, "videoPlay", "no video stream found");
        avformat_close_input(&avFormatContext);
        free(path);
        return;
    }

    // Build a decoder context from the stream parameters.
    AVCodecParameters *par = avFormatContext->streams[video_index]->codecpar;
    AVCodec *avCodec = avcodec_find_decoder(par->codec_id);
    AVCodecContext *avCodecContext = avcodec_alloc_context3(avCodec);
    avcodec_parameters_to_context(avCodecContext, par);
    if (avCodec == NULL || avcodec_open2(avCodecContext, avCodec, NULL) < 0) {
        __android_log_write(ANDROID_LOG_ERROR, "videoPlay", "打开失败");
        avcodec_free_context(&avCodecContext);
        avformat_close_input(&avFormatContext);
        free(path);
        return;
    }

    AVPacket *packet = av_packet_alloc();           // compressed packet from the demuxer
    AVFrame *frame = av_frame_alloc();              // decoded frame (YUV)
    AVFrame *rgb_frame = av_frame_alloc();          // frame converted to RGBA

    // Allocate one RGBA image buffer and point rgb_frame->data at it.
    int buffer_size = av_image_get_buffer_size(AV_PIX_FMT_RGBA,
                                               avCodecContext->width, avCodecContext->height, 1);
    uint8_t *out_buffer = (uint8_t *) av_malloc(buffer_size * sizeof(uint8_t));
    av_image_fill_arrays(rgb_frame->data, rgb_frame->linesize, out_buffer, AV_PIX_FMT_RGBA,
                         avCodecContext->width, avCodecContext->height, 1);

    // Connect to the Java Surface and force an RGBA_8888 buffer geometry.
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    ANativeWindow_Buffer native_outBuffer;
    ANativeWindow_setBuffersGeometry(nativeWindow, avCodecContext->width, avCodecContext->height,
                                     WINDOW_FORMAT_RGBA_8888);

    // Converter from the decoder's pixel format to RGBA at the same dimensions.
    SwsContext *sws_context = sws_getContext(avCodecContext->width, avCodecContext->height,
                                             avCodecContext->pix_fmt,
                                             avCodecContext->width, avCodecContext->height,
                                             AV_PIX_FMT_RGBA, SWS_BICUBIC, NULL, NULL, NULL);

    // Demux/decode loop.
    while (av_read_frame(avFormatContext, packet) >= 0) {
        if (packet->stream_index != video_index) {
            av_packet_unref(packet);  // fix: non-video packets were leaked before
            continue;
        }
        int ret = avcodec_send_packet(avCodecContext, packet);
        av_packet_unref(packet);
        if (ret != 0) {
            continue;
        }
        // Fix: one packet may produce several frames; drain them all instead of
        // calling avcodec_receive_frame only once per packet.
        while (avcodec_receive_frame(avCodecContext, frame) == 0) {
            sws_scale(sws_context, (const uint8_t *const *) frame->data, frame->linesize, 0,
                      frame->height,
                      rgb_frame->data, rgb_frame->linesize);

            if (ANativeWindow_lock(nativeWindow, &native_outBuffer, NULL) != 0) {
                continue;  // surface unavailable; skip this frame
            }
            uint8_t *dst = (uint8_t *) native_outBuffer.bits;
            int destStride = native_outBuffer.stride * 4;   // stride is in pixels, 4 bytes each
            uint8_t *src = rgb_frame->data[0];
            int srcStride = rgb_frame->linesize[0];
            // Copy row by row; never write past the window's own row stride.
            int copyBytes = srcStride < destStride ? srcStride : destStride;
            for (int i = 0; i < avCodecContext->height; ++i) {
                memcpy(dst + i * destStride, src + i * srcStride, copyBytes);
            }
            ANativeWindow_unlockAndPost(nativeWindow);
        }
    }

    // Release everything. Fixes in this section:
    //  - av_free(&frame) freed the address of a stack pointer (UB); the correct
    //    call is av_frame_free().
    //  - out_buffer, packet, sws_context and path were never freed.
    //  - avformat_close_input() pairs with avformat_open_input(); the original
    //    called only avformat_free_context().
    sws_freeContext(sws_context);
    ANativeWindow_release(nativeWindow);
    av_frame_free(&frame);
    av_frame_free(&rgb_frame);
    av_free(out_buffer);
    av_packet_free(&packet);
    avcodec_free_context(&avCodecContext);
    avformat_close_input(&avFormatContext);
    free(path);
}