背景
QT MediaPlayer是一个很好用的QT自带的原生多媒体播放器,它的用法可以看相关文档即可,但要在windows下使用的话需要安装解码器,例如 :LAV Filters。
今天这篇文章我们分为两部分来讲,一是利用MediaPlayer得到视频的每一帧。二 是利用 VideoOutput 显示自定义的数据流。这两个部分是独立的,也就是说第一点我们得到的每一帧都可以自己控制渲染,不管是使用qwidget还是opengl的方式,具体做法可以参考我之前写的两篇文章:QT + VS2015 ,获取VLC每一帧并渲染到Qwidget,QT利用opengl 进行视频裁剪、拼接,4宫格,9宫格,文章中的数据源是VLC。
今天我们将两部分内容结合起来,也就是解码和渲染分别用QMediaPlayer和VideoOutput,但是过程不是被QT封装死的,而是我们自定义的。
Part1,QMediaPlayer 提取每一帧
在这一部分我们主要会用到QAbstractVideoSurface这个类,官网文档解释到:
The QAbstractVideoSurface class defines the standard interface that video producers use to inter-operate with video presentation surfaces. You can subclass this interface to receive video frames from sources like decoded media or cameras to perform your own processing.
QAbstractVideoSurface类定义了视频制作者用于与视频演示表面交互操作的标准接口。您可以将此接口子类化,以便从解码媒体或照相机等源接收视频帧,以执行您自己的处理。
因此我们需要自定义一个类继承自QAbstractVideoSurface,然后实现它的present和supportedPixelFormats方法,QMediaPlayer会自动将帧送到present函数中。
在QML中我们只需要这样用: 首先注册这个类到QML中,
qmlRegisterType<VideoSurfaces>("com.nova.videoSurfaces", 1, 0, "VideoSurfaces");
然后指定MediaPlayer的videooutput元素对象为VideoSurfaces对象,像下面这样:
// Part 1 wiring: MediaPlayer decodes the file and hands every decoded
// frame to the custom VideoSurfaces instance via the videoOutput property.
import QtQuick 2.0
import QtMultimedia 5.15
import com.nova.videoSurfaces 1.0
Rectangle {
anchors.fill: parent
MediaPlayer {
id: player
source: "file:///D:/video/123.mp4"
autoLoad: true
autoPlay: true
// Route frames to the QAbstractVideoSurface subclass below instead of
// a visual VideoOutput item; present() will be called for each frame.
videoOutput: videosurfaces
}
VideoSurfaces {
id: videosurfaces
}
}
//videosurface.h
#ifndef VIDEOSURFACE_H
#define VIDEOSURFACE_H
#include <QObject>
#include <QAbstractVideoSurface>
#include <QVideoSurfaceFormat>
// Receives decoded frames from QMediaPlayer.  The player queries
// supportedPixelFormats() to negotiate a format, then delivers every
// decoded frame to present().
class VideoSurfaces : public QAbstractVideoSurface
{
    Q_OBJECT
public:
    explicit VideoSurfaces(QObject *parent = nullptr);
    ~VideoSurfaces();

    // Pixel format the QML side should use when configuring a downstream
    // FrameProvider (see Part 2 of the article).
    Q_INVOKABLE QVideoFrame::PixelFormat getpixformat();
    // NOTE(review): pause() has no definition in the .cpp shown here —
    // confirm it is implemented elsewhere before calling it from QML.
    Q_INVOKABLE void pause();

signals:
    // Emitted from present() with a CPU-addressable copy of each frame.
    // (Q_INVOKABLE removed: it is meaningless on a signal — signals are
    // always connectable/callable from QML.)
    void sendImage(const QVideoFrame& frame);

protected:
    // QAbstractVideoSurface interface.
    bool present(const QVideoFrame &frame) override;
    QList<QVideoFrame::PixelFormat> supportedPixelFormats(
            QAbstractVideoBuffer::HandleType handleType =
            QAbstractVideoBuffer::NoHandle) const override;
};
#endif // VIDEOSURFACE_H
//videosurface.cpp
#include "videosurface.h"
#include <QDebug>
// Trivial constructor: all state lives in the QAbstractVideoSurface base.
VideoSurfaces::VideoSurfaces(QObject *parent)
    : QAbstractVideoSurface(parent)
{
}

VideoSurfaces::~VideoSurfaces()
{
    // Deactivate the surface before it is destroyed so the producer
    // stops delivering frames to a dead object.
    stop();
}
// Format advertised to the QML layer; the article's pipeline assumes
// decoded frames arrive as planar YUV 4:2:0.
QVideoFrame::PixelFormat VideoSurfaces::getpixformat()
{
    qDebug() << "VideoSurfaces getformat";
    return QVideoFrame::Format_YUV420P;
}
// Advertise every format we are willing to receive.  QMediaPlayer picks
// one of these when it starts the surface; only CPU-addressable
// (NoHandle) buffers are accepted — GPU-handle buffers get an empty list.
QList<QVideoFrame::PixelFormat> VideoSurfaces::supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const
{
    if (handleType != QAbstractVideoBuffer::NoHandle)
        return QList<QVideoFrame::PixelFormat>();

    qDebug() << "VideoSurface NoHandle supportedPixelFormats" << (void*)this;

    // Built once; the supported set never changes at runtime.
    static const QList<QVideoFrame::PixelFormat> kSupported = {
        QVideoFrame::Format_ARGB32,
        QVideoFrame::Format_ARGB32_Premultiplied,
        QVideoFrame::Format_RGB32,
        QVideoFrame::Format_RGB24,
        QVideoFrame::Format_RGB565,
        QVideoFrame::Format_RGB555,
        QVideoFrame::Format_ARGB8565_Premultiplied,
        QVideoFrame::Format_BGRA32,
        QVideoFrame::Format_BGRA32_Premultiplied,
        QVideoFrame::Format_BGR32,
        QVideoFrame::Format_BGR24,
        QVideoFrame::Format_BGR565,
        QVideoFrame::Format_BGR555,
        QVideoFrame::Format_AYUV444,
        QVideoFrame::Format_AYUV444_Premultiplied,
        QVideoFrame::Format_YUV444,
        QVideoFrame::Format_YUV420P,
        QVideoFrame::Format_YV12,
        QVideoFrame::Format_UYVY,
        QVideoFrame::Format_YUYV,
        QVideoFrame::Format_NV12,
        QVideoFrame::Format_NV21,
        QVideoFrame::Format_IMC1,
        QVideoFrame::Format_IMC2,
        QVideoFrame::Format_Y8,
        QVideoFrame::Format_Y16,
        QVideoFrame::Format_Jpeg,
        QVideoFrame::Format_CameraRaw,
        QVideoFrame::Format_AdobeDng
    };
    return kSupported;
}
// Called by QMediaPlayer for every decoded frame.  The frame data may
// live in GPU memory, so it must be map()ed before the CPU can read it.
// Returns true if the frame was consumed, false otherwise.
bool VideoSurfaces::present(const QVideoFrame &frame)
{
    qDebug() << "VideoSurfaces present";
    if (!frame.isValid())
        return false;   // original fell off the end here (UB) — reject explicitly

    // map() can fail (e.g. unsupported handle type); check it.
    QVideoFrame cloneFrame(frame);
    if (!cloneFrame.map(QAbstractVideoBuffer::ReadOnly))
        return false;

    // Wrap the mapped bytes.  Pass bytesPerLine() explicitly: rows can be
    // padded, and the stride-less QImage ctor would assume a tight stride.
    QImage image(cloneFrame.bits(), cloneFrame.width(), cloneFrame.height(),
                 cloneFrame.bytesPerLine(),
                 QVideoFrame::imageFormatFromPixelFormat(frame.pixelFormat()));

    // Deep-copy before unmapping: the QImage above only references the
    // mapped buffer, which becomes invalid after unmap().
    QVideoFrame f = QVideoFrame(image.copy());
    cloneFrame.unmap();

    emit sendImage(f);
    return true;
}
这样就会在我们的VideoSurfaces类的present函数中收到我们的帧。但此时一定要注意:这里面的帧数据QVideoFrame是CPU无法寻址的,它的数据格式是GPU支持的格式,因此如果要在内存中使用或者通过信号槽发射的话,一定要先进行map操作,并转化(深拷贝)为QImage(我本人尝试过,直接发送未经map的QVideoFrame是不可行的)。
Part2,自定义VideoOutput 数据源
要自定义的VideoOutput的数据源,官方文档是这样说的:
If you are extending your own C++ classes to interoperate with VideoOutput, you can either provide a QObject based class with a mediaObject property that exposes a QMediaObject derived class that has a QVideoRendererControl available, or you can provide a QObject based class with a writable videoSurface property that can accept a QAbstractVideoSurface based class and can follow the correct protocol to deliver QVideoFrames to it.
如果您要扩展自己的 C++ 类以与 VideoOutput 互操作,有两种方式:一是提供一个基于 QObject 的类,它带有一个 mediaObject 属性,该属性暴露一个派生自 QMediaObject、且可提供 QVideoRendererControl 的对象;二是提供一个基于 QObject 的类,它带有一个可写的 videoSurface 属性,该属性可以接受一个基于 QAbstractVideoSurface 的对象,并按照正确的协议将 QVideoFrame 传递给它。
第二个方法也就是我们下面的例子,自定义一个继承自QObject的类,并持有一个QAbstractVideoSurface对象,同时要提供setVideoSurface方法和videoSurface方法,然后将接收到的帧传递给m_surface对象即可。
// Part 2 wiring: a VideoOutput whose frames come from our FrameProvider
// (via its writable videoSurface property) rather than from a MediaPlayer.
VideoOutput {
id: vo
anchors.fill: parent
source: frameProvider
fillMode: VideoOutput.Stretch
}
frameprovider.h
#include <QObject>
#include <QAbstractVideoSurface>
#include <QVideoSurfaceFormat>
#include <QDebug>
// Bridges an arbitrary frame producer to a QML VideoOutput.  VideoOutput
// recognises the writable videoSurface property and injects its internal
// QAbstractVideoSurface through setVideoSurface(); frames are then pushed
// to that surface in onNewVideoContentReceived().
class FrameProvider : public QObject
{
    Q_OBJECT
    // No NOTIFY signal: VideoOutput writes this property rather than
    // binding to it, so change notification is not required here.
    Q_PROPERTY(QAbstractVideoSurface *videoSurface READ videoSurface WRITE setVideoSurface)
public:
    FrameProvider();
    ~FrameProvider();

    // Property accessors used by VideoOutput.
    QAbstractVideoSurface* videoSurface();
    void setVideoSurface(QAbstractVideoSurface* surface);

    // Configure the surface format; call (e.g. from QML) before frames
    // arrive so the surface can be started with the right size/format.
    Q_INVOKABLE void setFormat(int width, int height, QVideoFrame::PixelFormat format);

public slots:
    // Feed one decoded frame to the active surface.
    void onNewVideoContentReceived(const QVideoFrame& frame);

private:
    QAbstractVideoSurface *m_surface = nullptr;  // not owned; supplied by VideoOutput
    QVideoSurfaceFormat m_format;                // last format requested via setFormat()
};
frameprovider.cpp
// Quotes, not angle brackets, for the project's own header: <...> searches
// only the system/compiler include paths and may miss the local directory.
#include "frameprovider.h"

FrameProvider::FrameProvider()
{
    qDebug() << "FrameProvider construct";
}

FrameProvider::~FrameProvider()
{
    qDebug() << "FrameProvider destruct";
}
// READ accessor for the videoSurface property (queried by VideoOutput).
QAbstractVideoSurface* FrameProvider::videoSurface()
{
    qDebug() << "FrameProvider return videoSurface";
    return m_surface;
}
// WRITE accessor for the videoSurface property.  VideoOutput calls this
// with its internal surface; we stop any previously attached surface and,
// if a format is already known, start the new one immediately.
void FrameProvider::setVideoSurface(QAbstractVideoSurface* surface)
{
    qDebug() << "FrameProvider setVideoSurface:" << surface;

    // Shut down the old surface before swapping it out.
    const bool replacingActiveSurface =
            m_surface && m_surface != surface && m_surface->isActive();
    if (replacingActiveSurface)
        m_surface->stop();

    m_surface = surface;

    // If setFormat() already ran, (re)start the new surface with the
    // closest format it actually supports.
    if (m_surface != nullptr && m_format.isValid()) {
        m_format = m_surface->nearestFormat(m_format);
        m_surface->start(m_format);
        qDebug() << "FrameProvider setVideoSurface start m_surface ,m_format:" << m_format.pixelFormat();
    }
}
// Record the desired surface format.  Takes effect immediately when a
// surface is already attached (restarting it so the new format is picked
// up), otherwise when setVideoSurface() later runs.
// ("heigth" typo in the parameter name fixed — declaration/definition
// parameter names have no effect on callers.)
void FrameProvider::setFormat(int width, int height, QVideoFrame::PixelFormat format)
{
    qDebug() << "FrameProvider setFormat width:" << width << ".height:" << height << "format:" << format;
    m_format = QVideoSurfaceFormat(QSize(width, height), format);
    if (m_surface)
    {
        // A running surface must be stopped before it can be restarted
        // with a different format.
        if (m_surface->isActive())
        {
            m_surface->stop();
        }
        m_format = m_surface->nearestFormat(m_format);
        m_surface->start(m_format);
        qDebug() << "FrameProvider setFormat start m_surface ,m_format:" << m_format.pixelFormat();
    }
}
// Slot: forward one frame to the surface VideoOutput gave us.  Frames
// arriving before a surface is attached (or invalid frames) are dropped.
void FrameProvider::onNewVideoContentReceived(const QVideoFrame& frame)
{
    qDebug() << "FrameProvider onNewVideoContentReceived";
    if (m_surface != nullptr && frame.isValid())
        m_surface->present(frame);
}
给QML中注册:
qmlRegisterType<FrameProvider>("com.nova.frameProvider", 1, 0, "FrameProvider");
下面的代码是QML中使用完整的例子:
// Complete pipeline: MediaPlayer -> VideoSurfaces (frame extraction) ->
// FrameProvider (custom source) -> VideoOutput (rendering).
import QtQuick 2.0
import QtMultimedia 5.15
import com.nova.frameProvider 1.0
import com.nova.videoSurfaces 1.0
Rectangle {
anchors.fill: parent
property string url: "file:///D:/video/123.mp4"
MediaPlayer {
id: player
source: url
autoLoad: true
autoPlay: true
// Decoded frames go to our surface, not to a visual item.
videoOutput: videosurfaces
}
function play() {
// Connect the surface's per-frame signal to the provider, configure the
// provider's surface format, then start playback.
videosurfaces.sendImage.connect(frameProvider.onNewVideoContentReceived)
frameProvider.setFormat(parent.width, parent.height, videosurfaces.getpixformat())
player.play()
}
VideoOutput {
id: vo
anchors.fill: parent
// The provider's writable videoSurface property makes it a valid source.
source: frameProvider
fillMode: VideoOutput.Stretch
}
FrameProvider {
id: frameProvider
}
VideoSurfaces {
id: videosurfaces
}
}