声明:本文是看完韦东山的驱动视频教程所写的。如果有相关内容与其他网友相同,敬请原谅;如果这篇文章对你有帮助,那我将十分荣幸。
1.V4L2简介
V4L2(Video for Linux two)是Linux下关于视频采集相关设备的驱动框架,为驱动和应用程序提供了一套统一的接口规范。V4L2支持内存映射方式(mmap)和直接读取方式(read)来采集数据,前者一般用于连续视频数据的采集,后者常用于静态数据的采集。
2. APP采集视频数据步骤
转载从 v4l2框架
- 打开视频设备文件,进行视频采集的参数初始化,通过V4L2接口设置视频图像的采集窗口,采集的点阵大小和采集的格式。
- 申请若干视频采集的帧缓冲区,并将这些帧缓冲区从内核空间映射到用户空间,便于应用程序读取/处理视频数据
- 将申请到的帧缓冲区放入视频采集输入队列排队,并启动视频采集
- 驱动开始视频数据的采集,应用程序从视频采集输出队列取出帧缓冲区,处理完成之后,将帧缓冲区重新放入视频采集输入队列,循环往复采集连续的视频数据
- 停止视频采集
具体的程序实现流程可参考下面流程图:
启动视频采集之后,驱动程序开始采集一帧数据,把采集到数据放入视频采集输入队列的第一个帧缓冲区,一帧数据采集完成之后,驱动程序将该帧缓冲区移至视频采集输出队列,等待应用程序从输出队列取出,驱动程序接下来采集下一帧数据,放入第二个帧缓冲区,同样帧缓冲区存满,下一帧数据后,被放入视频采集输出队列,应用程序从视频采集输出队列中取出含有视频数据的帧缓冲区,处理帧缓冲区中的视频数据,如存储或压缩,最后应用程序又将处理完数据的帧缓冲区程序放入视频采集输入队列之后,这样可以循环采集
3.V4L2编程
- 工作流程 打开设备->检查和设置设备属性->设置帧格式->帧缓冲管理->循环获取数据->关闭设备
- 设备的打开与关闭
- 设备的打开和关闭:
相关函数:
#include <fcntl.h>
int open(const char *device_name, int flags);
#include <unistd.h>
int close(int fd);
例:
int fd = open("/dev/video0", O_RDWR); // 打开设备
close(fd); // 关闭设备
- 查询设备属性:VIDIOC_QUERYCAP
相关函数:
int ioctl(intfd, int request, struct v4l2_capability *argp);
structv4l2_capability
{
__u8 driver[16]; // 驱动名字
__u8 card[32]; // 设备名字
__u8bus_info[32]; // 设备在系统中的位置
__u32 version; // 驱动版本号
__u32capabilities; // 设备支持的操作
__u32reserved[4]; // 保留字段
};
capabilities 常用值:
V4L2_CAP_VIDEO_CAPTURE // 是否支持图像获取
5.帧格式:VIDIOC_ENUM_FMT
VIDIOC_ENUM_FMT// 显示所有支持的格式
int ioctl(intfd, int request, struct v4l2_fmtdesc *argp);
structv4l2_fmtdesc
{
__u32 index;// 要查询的格式序号,应用程序设置
enumv4l2_buf_type type;// 帧类型,应用程序设置
__u32 flags;// 是否为压缩格式
__u8 description[32]; // 格式名称
__u32 pixelformat; // 格式
__u32reserved[4]; // 保留
};
例如:
structv4l2_fmtdesc fmtdesc;
2. fmtdesc.index=0;
3. fmtdesc.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
4. printf("Support format:\n");
5. while(ioctl(fd,VIDIOC_ENUM_FMT,&fmtdesc)!=-1)
6. {
7. printf("\t%d.%s\n", fmtdesc.index+1, fmtdesc.description);
8. fmtdesc.index++;
9. }
设置当前格式:
tV4l2Fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2Fmt.fmt.pix.pixelformat = ptVideoDevice->iPixelFormat;
tV4l2Fmt.fmt.pix.width = iLcdWidth;
tV4l2Fmt.fmt.pix.height = iLcdHeigt;
tV4l2Fmt.fmt.pix.field = V4L2_FIELD_ANY;
/* 如果驱动程序发现无法支持某些参数(比如分辨率),
它会调整这些参数, 并且返回给应用程序
*/
iError = ioctl(iFd, VIDIOC_S_FMT, &tV4l2Fmt);
6.申请帧缓冲区:应用程序和设备有三种数据交换的方法,直接read/write,内存映射,用户指针
- 内存映射方式: 申请帧缓冲区
VIDIOC_REQBUFS
2. int ioctl(intfd, int request, struct v4l2_requestbuffers *argp);
3. structv4l2_requestbuffers
4. {
5. __u32 count; // 缓冲区内缓冲帧的数目
6. enumv4l2_buf_type type; // 缓冲帧数据格式
7. enum v4l2_memorymemory; // 区别是内存映射还是用户指针方式
8. __u32 reserved[2];
9. };
11. enum v4l2_memory {V4L2_MEMORY_MMAP, V4L2_MEMORY_USERPTR};
12. //count,type,memory都要应用程序设置
举例:申请一个拥有四个缓冲帧的缓冲区
structv4l2_requestbuffers req;
req.count=4;
req.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory=V4L2_MEMORY_MMAP;
ioctl(fd,VIDIOC_REQBUFS,&req);
- 获取每个缓冲帧的地址,长度:
VIDIOC_QUERYBUF
int ioctl(intfd, int request, struct v4l2_buffer *argp)
structv4l2_buffer
{
__u32 index; //buffer 序号
enumv4l2_buf_type type; //buffer 类型
__u32 bytesused; // buffer 中已使用的字节数
__u32 flags; // 区分是MMAP 还是USERPTR
enum v4l2_fieldfield;
struct timevaltimestamp;// 获取第一个字节时的系统时间
structv4l2_timecode timecode;
__u32 sequence;// 队列中的序号
enum v4l2_memorymemory;//IO 方式,被应用程序设置
union m
{
__u32 offset;// 缓冲帧地址,只对MMAP 有效
unsigned longuserptr;
};
__u32 length;// 缓冲帧长度
__u32 input;
__u32 reserved;
};
mmap,定义一个结构体来映射每个缓冲帧:
struct buffer
{
void* start;
unsigned intlength;
}*buffers;
#include<sys/mman.h>
void *mmap(void*addr, size_t length, int prot, int flags, int fd, off_t offset);
//addr 映射起始地址,一般为NULL ,让内核自动选择
//length 被映射内存块的长度
//prot 标志映射后能否被读写,其值为PROT_EXEC,PROT_READ,PROT_WRITE,PROT_NONE
//flags 确定此内存映射能否被其他进程共享,MAP_SHARED,MAP_PRIVATE
//fd,offset, 确定被映射的内存地址
返回成功映射后的地址,不成功返回MAP_FAILED ((void*)-1);
int munmap(void*addr, size_t length);// 断开映射
//addr 为映射后的地址,length 为映射后的内存长度
举例:将四个申请到的缓冲帧映射到应用程序,用buffer来记录:
buffers =(buffer*)calloc (req.count, sizeof (*buffers));
if (!buffers) {
exit(EXIT_FAILURE);
}
映射:
ptVideoDevice->pucVideBuf[i] = mmap(0 /* start anywhere */ ,
tV4l2Buf.length, PROT_READ, MAP_SHARED, iFd,
tV4l2Buf.m.offset);
7.缓冲区处理后之后,就开始获取数据了
. // 启动/ 停止数据流
VIDIOC_STREAMON,VIDIOC_STREAMOFF
int ioctl(intfd, int request, const int *argp);
//argp 为流类型指针,如V4L2_BUF_TYPE_VIDEO_CAPTURE.
在开始之前,还应当把缓冲帧放入缓冲队列:
VIDIOC_QBUF// 把帧放入队列
VIDIOC_DQBUF// 从队列中取出帧
int ioctl(intfd, int request, struct v4l2_buffer *argp);
例如:将四个缓冲帧放入队列中,并启动数据流
unsigned int i;
2. enum v4l2_buf_typetype;
3. // 将缓冲帧放入队列
4. for (i = 0; i< 4; ++i)
5. {
6. structv4l2_buffer buf;
7. buf.type =V4L2_BUF_TYPE_VIDEO_CAPTURE;
8. buf.memory =V4L2_MEMORY_MMAP;
9. buf.index = i;
10. ioctl (fd,VIDIOC_QBUF, &buf);
11. }
12. type =V4L2_BUF_TYPE_VIDEO_CAPTURE;
13. ioctl (fd,VIDIOC_STREAMON, &type);
获取一帧数据并处理:
struct pollfd tFds[1];
int iRet;
struct v4l2_buffer tV4l2Buf;
/* poll */
tFds[0].fd = ptVideoDevice->iFd;
tFds[0].events = POLLIN;
iRet = poll(tFds, 1, -1);
if (iRet <= 0)
{
DBG_PRINTF("poll error!\n");
return -1;
}
/* VIDIOC_DQBUF */
memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
tV4l2Buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2Buf.memory = V4L2_MEMORY_MMAP;
iRet = ioctl(ptVideoDevice->iFd, VIDIOC_DQBUF, &tV4l2Buf);
if (iRet < 0)
{
DBG_PRINTF("Unable to dequeue buffer.\n");
return -1;
}
ptVideoDevice->iVideoBufCurIndex = tV4l2Buf.index;
ptVideoBuf->iPixelFormat = ptVideoDevice->iPixelFormat;
ptVideoBuf->tPixelDatas.iWidth = ptVideoDevice->iWidth;
ptVideoBuf->tPixelDatas.iHeight = ptVideoDevice->iHeight;
ptVideoBuf->tPixelDatas.iBpp = (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_YUYV) ? 16 : \
(ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_MJPEG) ? 0 : \
(ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_RGB565) ? 16 : \
0;
ptVideoBuf->tPixelDatas.iLineBytes = ptVideoDevice->iWidth * ptVideoBuf->tPixelDatas.iBpp / 8;
ptVideoBuf->tPixelDatas.iTotalBytes = tV4l2Buf.bytesused;
ptVideoBuf->tPixelDatas.aucPixelDatas = ptVideoDevice->pucVideBuf[tV4l2Buf.index];
return 0;
完整代码:
#ifndef _VIDEO_MANAGER_H
#define _VIDEO_MANAGER_H
/* video_manager.h — types and entry points for the V4L2 video-capture layer. */
#include <config.h>
#include <pic_operation.h>
#include <linux/videodev2.h>
/* Number of frame buffers requested from the driver via VIDIOC_REQBUFS. */
#define NB_BUFFER 4
struct VideoDevice;
struct VideoOpr;
typedef struct VideoDevice T_VideoDevice, *PT_VideoDevice;
typedef struct VideoOpr T_VideoOpr, *PT_VideoOpr;
/* Per-device state for one opened capture device (e.g. /dev/video0). */
struct VideoDevice {
int iFd;                              /* fd returned by open() on the device node */
int iPixelFormat;                     /* negotiated V4L2_PIX_FMT_* value */
int iWidth;                           /* frame width actually set by VIDIOC_S_FMT */
int iHeight;                          /* frame height actually set by VIDIOC_S_FMT */
int iVideoBufCnt;                     /* number of buffers granted by VIDIOC_REQBUFS */
int iVideoBufMaxLen;                  /* length of each buffer (from VIDIOC_QUERYBUF) */
int iVideoBufCurIndex;                /* index of the most recently dequeued buffer */
unsigned char *pucVideBuf[NB_BUFFER]; /* mmap'd (or malloc'd, in read/write mode) buffers */
/* Operations table bound to this device (see struct VideoOpr). */
PT_VideoOpr ptOPr;
};
/* One captured frame handed to the application: pixel data plus its format. */
typedef struct VideoBuf {
T_PixelDatas tPixelDatas;
int iPixelFormat;
}T_VideoBuf, *PT_VideoBuf;
/* Operations implemented by each capture backend (linked in a singly-linked list). */
struct VideoOpr {
char *name;                           /* backend name */
int (*InitDevice)(char *strDevName, PT_VideoDevice ptVideoDevice);
int (*ExitDevice)(PT_VideoDevice ptVideoDevice);
int (*GetFrame)(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf);
int (*GetFormat)(PT_VideoDevice ptVideoDevice);
int (*PutFrame)(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf);
int (*StartDevice)(PT_VideoDevice ptVideoDevice);
int (*StopDevice)(PT_VideoDevice ptVideoDevice);
struct VideoOpr *ptNext;              /* next registered backend */
};
int VideoDeviceInit(char *strDevName, PT_VideoDevice ptVideoDevice);
int V4l2Init(void);
int RegisterVideoOpr(PT_VideoOpr ptVideoOpr);
int VideoInit(void);
#endif /* _VIDEO_MANAGER_H */
int i;
int iFd;
int iError;
struct v4l2_capability tV4l2Cap;
struct v4l2_fmtdesc tFmtDesc;
struct v4l2_format tV4l2Fmt;
struct v4l2_requestbuffers tV4l2ReqBuffs;
struct v4l2_buffer tV4l2Buf;
int iLcdWidth;
int iLcdHeigt;
int iLcdBpp;
iFd = open(strDevName, O_RDWR);
if (iFd < 0)
{
DBG_PRINTF("can not open %s\n", strDevName);
return -1;
}
ptVideoDevice->iFd = iFd;
iError = ioctl(iFd, VIDIOC_QUERYCAP, &tV4l2Cap);
memset(&tV4l2Cap, 0, sizeof(struct v4l2_capability));
iError = ioctl(iFd, VIDIOC_QUERYCAP, &tV4l2Cap);
if (iError) {
DBG_PRINTF("Error opening device %s: unable to query device.\n", strDevName);
goto err_exit;
}
if (!(tV4l2Cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
{
DBG_PRINTF("%s is not a video capture device\n", strDevName);
goto err_exit;
}
if (tV4l2Cap.capabilities & V4L2_CAP_STREAMING) {
DBG_PRINTF("%s supports streaming i/o\n", strDevName);
}
if (tV4l2Cap.capabilities & V4L2_CAP_READWRITE) {
DBG_PRINTF("%s supports read i/o\n", strDevName);
}
memset(&tFmtDesc, 0, sizeof(tFmtDesc));
tFmtDesc.index = 0;
tFmtDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while ((iError = ioctl(iFd, VIDIOC_ENUM_FMT, &tFmtDesc)) == 0) {
if (isSupportThisFormat(tFmtDesc.pixelformat))
{
ptVideoDevice->iPixelFormat = tFmtDesc.pixelformat;
break;
}
tFmtDesc.index++;
}
if (!ptVideoDevice->iPixelFormat)
{
DBG_PRINTF("can not support the format of this device\n");
goto err_exit;
}
/* set format in */
GetDispResolution(&iLcdWidth, &iLcdHeigt, &iLcdBpp);
memset(&tV4l2Fmt, 0, sizeof(struct v4l2_format));
tV4l2Fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2Fmt.fmt.pix.pixelformat = ptVideoDevice->iPixelFormat;
tV4l2Fmt.fmt.pix.width = iLcdWidth;
tV4l2Fmt.fmt.pix.height = iLcdHeigt;
tV4l2Fmt.fmt.pix.field = V4L2_FIELD_ANY;
/* 如果驱动程序发现无法某些参数(比如分辨率),
* 它会调整这些参数, 并且返回给应用程序
*/
iError = ioctl(iFd, VIDIOC_S_FMT, &tV4l2Fmt);
if (iError)
{
DBG_PRINTF("Unable to set format\n");
goto err_exit;
}
ptVideoDevice->iWidth = tV4l2Fmt.fmt.pix.width;
ptVideoDevice->iHeight = tV4l2Fmt.fmt.pix.height;
/* request buffers */
memset(&tV4l2ReqBuffs, 0, sizeof(struct v4l2_requestbuffers));
tV4l2ReqBuffs.count = NB_BUFFER;
tV4l2ReqBuffs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2ReqBuffs.memory = V4L2_MEMORY_MMAP;
iError = ioctl(iFd, VIDIOC_REQBUFS, &tV4l2ReqBuffs);
if (iError)
{
DBG_PRINTF("Unable to allocate buffers.\n");
goto err_exit;
}
ptVideoDevice->iVideoBufCnt = tV4l2ReqBuffs.count;
if (tV4l2Cap.capabilities & V4L2_CAP_STREAMING)
{
/* map the buffers */
for (i = 0; i < ptVideoDevice->iVideoBufCnt; i++)
{
memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
tV4l2Buf.index = i;
tV4l2Buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2Buf.memory = V4L2_MEMORY_MMAP;
iError = ioctl(iFd, VIDIOC_QUERYBUF, &tV4l2Buf);
if (iError)
{
DBG_PRINTF("Unable to query buffer.\n");
goto err_exit;
}
ptVideoDevice->iVideoBufMaxLen = tV4l2Buf.length;
ptVideoDevice->pucVideBuf[i] = mmap(0 /* start anywhere */ ,
tV4l2Buf.length, PROT_READ, MAP_SHARED, iFd,
tV4l2Buf.m.offset);
if (ptVideoDevice->pucVideBuf[i] == MAP_FAILED)
{
DBG_PRINTF("Unable to map buffer\n");
goto err_exit;
}
}
/* Queue the buffers. */
for (i = 0; i < ptVideoDevice->iVideoBufCnt; i++)
{
memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
tV4l2Buf.index = i;
tV4l2Buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2Buf.memory = V4L2_MEMORY_MMAP;
iError = ioctl(iFd, VIDIOC_QBUF, &tV4l2Buf);
if (iError)
{
DBG_PRINTF("Unable to queue buffer.\n");
goto err_exit;
}
}
}
else if (tV4l2Cap.capabilities & V4L2_CAP_READWRITE)
{
g_tV4l2VideoOpr.GetFrame = V4l2GetFrameForReadWrite;
g_tV4l2VideoOpr.PutFrame = V4l2PutFrameForReadWrite;
/* read(fd, buf, size) */
ptVideoDevice->iVideoBufCnt = 1;
/* 在这个程序所能支持的格式里, 一个象素最多只需要4字节 */
ptVideoDevice->iVideoBufMaxLen = ptVideoDevice->iWidth * ptVideoDevice->iHeight * 4;
ptVideoDevice->pucVideBuf[0] = malloc(ptVideoDevice->iVideoBufMaxLen);
}
ptVideoDevice->ptOPr = &g_tV4l2VideoOpr;
return 0;
err_exit:
close(iFd);
return -1;
}
/*
 * Release the resources held by an initialized capture device:
 * unmap every frame buffer still present and close the device fd.
 * Always returns 0.
 *
 * NOTE(review): in read/write (non-streaming) mode the single buffer
 * is obtained with malloc() in the init path, so munmap() here would
 * be wrong for that case — confirm and free() instead if RW mode is used.
 */
static int V4l2ExitDevice(PT_VideoDevice ptVideoDevice)
{
    int idx;

    for (idx = 0; idx < ptVideoDevice->iVideoBufCnt; idx++) {
        unsigned char *pucBuf = ptVideoDevice->pucVideBuf[idx];

        if (pucBuf != NULL) {
            munmap(pucBuf, ptVideoDevice->iVideoBufMaxLen);
            ptVideoDevice->pucVideBuf[idx] = NULL; /* avoid dangling pointer */
        }
    }
    close(ptVideoDevice->iFd);
    return 0;
}
static int V4l2GetFrameForStreaming(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf)
{
struct pollfd tFds[1];
int iRet;
struct v4l2_buffer tV4l2Buf;
/* poll */
tFds[0].fd = ptVideoDevice->iFd;
tFds[0].events = POLLIN;
iRet = poll(tFds, 1, -1);