Reading Data from the Camera
The first step is to read out the data captured by the camera. For that we need a structure describing the captured data and a set of operation functions for the device:
typedef struct VideoDevice {
    int iFd;                              /* file descriptor of the video device */
    int iPixelFormat;                     /* pixel format chosen from the driver's list */
    int iWidth;
    int iHeight;
    int iVideoBufCnt;                     /* number of buffers */
    int iVideoBufMaxLen;                  /* size of each buffer */
    int iVideoBufCurIndex;                /* index of the buffer currently dequeued */
    unsigned char *pucVideBuf[NB_BUFFER]; /* user-space addresses of the buffers */
    /* operation functions */
    PT_VideoOpr ptOPr;
}T_VideoDevice, *PT_VideoDevice;
The operations structure:
typedef struct VideoOpr {
    char *name;
    int (*InitDevice)(char *strDevName, PT_VideoDevice ptVideoDevice);
    int (*ExitDevice)(PT_VideoDevice ptVideoDevice);
    int (*GetFrame)(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf);
    int (*PutFrame)(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf);
    int (*StartDevice)(PT_VideoDevice ptVideoDevice);
    int (*StopDevice)(PT_VideoDevice ptVideoDevice);
    struct VideoOpr *ptNext;              /* next node in the list of registered operations */
}T_VideoOpr, *PT_VideoOpr;
It contains functions to initialize and exit the device, to get a buffer from the queue and to put a buffer back onto the queue, plus functions to start and stop the device. All of these operations are implemented in v4l2.c, which is what we do next.
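The PT_VideoBuf type used by GetFrame/PutFrame is not listed in the original post; judging from the fields accessed later (iPixelFormat, tPixelDatas.iWidth, iBpp, iLineBytes, iTotalBytes, aucPixelDatas), it presumably looks something like this sketch:

/* Assumed definitions, reconstructed from the fields used in v4l2.c below */
typedef struct PixelDatas {
    int iWidth;                    /* width in pixels */
    int iHeight;                   /* height in pixels */
    int iBpp;                      /* bits per pixel (0 for compressed formats) */
    int iLineBytes;                /* bytes per line */
    int iTotalBytes;               /* total bytes of this frame */
    unsigned char *aucPixelDatas;  /* pointer to the frame data */
} T_PixelDatas, *PT_PixelDatas;

typedef struct VideoBuf {
    T_PixelDatas tPixelDatas;      /* the frame itself */
    int iPixelFormat;              /* V4L2_PIX_FMT_YUYV / MJPEG / RGB565 */
} T_VideoBuf, *PT_VideoBuf;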
Implementation of v4l2.c
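The original post does not show the header includes; given the calls used below (open, ioctl, mmap, poll, read, malloc), the file would need roughly the following set, where config.h and video_manager.h are assumed project headers providing DBG_PRINTF, NB_BUFFER and the structures above:

#include <config.h>          /* project configuration, DBG_PRINTF, NB_BUFFER */
#include <video_manager.h>   /* T_VideoDevice, T_VideoOpr, T_VideoBuf */
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <poll.h>
#include <string.h>
#include <stdlib.h>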
static int V4l2InitDevice(char *strDevName, PT_VideoDevice ptVideoDevice)
{
    int i;
    int iFd;
    int iError;
    struct v4l2_capability tV4l2Cap;
    struct v4l2_fmtdesc tFmtDesc;
    struct v4l2_format tV4l2Fmt;
    struct v4l2_requestbuffers tV4l2ReqBuffs;
    struct v4l2_buffer tV4l2Buf;
    int iLcdWidth;
    int iLcdHeigt;
    int iLcdBpp;

    iFd = open(strDevName, O_RDWR);
    if (iFd < 0)
    {
        DBG_PRINTF("can not open %s\n", strDevName);
        return -1;
    }
    ptVideoDevice->iFd = iFd;

    /* query the device capabilities */
    memset(&tV4l2Cap, 0, sizeof(struct v4l2_capability));
    iError = ioctl(iFd, VIDIOC_QUERYCAP, &tV4l2Cap);
    if (iError) {
        DBG_PRINTF("Error opening device %s: unable to query device.\n", strDevName);
        goto err_exit;
    }

    if (!(tV4l2Cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        DBG_PRINTF("%s is not a video capture device\n", strDevName);
        goto err_exit;
    }

    if (tV4l2Cap.capabilities & V4L2_CAP_STREAMING) {
        DBG_PRINTF("%s supports streaming i/o\n", strDevName);
    }

    if (tV4l2Cap.capabilities & V4L2_CAP_READWRITE) {
        DBG_PRINTF("%s supports read i/o\n", strDevName);
    }

    /* enumerate the driver's pixel formats and pick one this program can handle */
    memset(&tFmtDesc, 0, sizeof(tFmtDesc));
    tFmtDesc.index = 0;
    tFmtDesc.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    while ((iError = ioctl(iFd, VIDIOC_ENUM_FMT, &tFmtDesc)) == 0) {
        if (isSupportThisFormat(tFmtDesc.pixelformat))
        {
            ptVideoDevice->iPixelFormat = tFmtDesc.pixelformat;
            break;
        }
        tFmtDesc.index++;
    }

    if (!ptVideoDevice->iPixelFormat)
    {
        DBG_PRINTF("can not support the format of this device\n");
        goto err_exit;
    }

    /* set format in */
    GetDispResolution(&iLcdWidth, &iLcdHeigt, &iLcdBpp);
    memset(&tV4l2Fmt, 0, sizeof(struct v4l2_format));
    tV4l2Fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    tV4l2Fmt.fmt.pix.pixelformat = ptVideoDevice->iPixelFormat;
    tV4l2Fmt.fmt.pix.width       = iLcdWidth;
    tV4l2Fmt.fmt.pix.height      = iLcdHeigt;
    tV4l2Fmt.fmt.pix.field       = V4L2_FIELD_ANY;

    /* If the driver cannot support some of these parameters (e.g. the resolution),
     * it adjusts them and returns the adjusted values to the application.
     */
    iError = ioctl(iFd, VIDIOC_S_FMT, &tV4l2Fmt);
    if (iError)
    {
        DBG_PRINTF("Unable to set format\n");
        goto err_exit;
    }
    ptVideoDevice->iWidth  = tV4l2Fmt.fmt.pix.width;
    ptVideoDevice->iHeight = tV4l2Fmt.fmt.pix.height;

    /* request buffers */
    memset(&tV4l2ReqBuffs, 0, sizeof(struct v4l2_requestbuffers));
    tV4l2ReqBuffs.count  = NB_BUFFER;
    tV4l2ReqBuffs.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    tV4l2ReqBuffs.memory = V4L2_MEMORY_MMAP;

    iError = ioctl(iFd, VIDIOC_REQBUFS, &tV4l2ReqBuffs);
    if (iError)
    {
        DBG_PRINTF("Unable to allocate buffers.\n");
        goto err_exit;
    }
    ptVideoDevice->iVideoBufCnt = tV4l2ReqBuffs.count;

    if (tV4l2Cap.capabilities & V4L2_CAP_STREAMING)
    {
        /* map the buffers */
        for (i = 0; i < ptVideoDevice->iVideoBufCnt; i++)
        {
            memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
            tV4l2Buf.index  = i;
            tV4l2Buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            tV4l2Buf.memory = V4L2_MEMORY_MMAP;
            iError = ioctl(iFd, VIDIOC_QUERYBUF, &tV4l2Buf);
            if (iError)
            {
                DBG_PRINTF("Unable to query buffer.\n");
                goto err_exit;
            }

            ptVideoDevice->iVideoBufMaxLen = tV4l2Buf.length;
            ptVideoDevice->pucVideBuf[i] = mmap(0 /* start anywhere */,
                                                tV4l2Buf.length, PROT_READ, MAP_SHARED, iFd,
                                                tV4l2Buf.m.offset);
            if (ptVideoDevice->pucVideBuf[i] == MAP_FAILED)
            {
                DBG_PRINTF("Unable to map buffer\n");
                goto err_exit;
            }
        }

        /* Queue the buffers. */
        for (i = 0; i < ptVideoDevice->iVideoBufCnt; i++)
        {
            memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
            tV4l2Buf.index  = i;
            tV4l2Buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            tV4l2Buf.memory = V4L2_MEMORY_MMAP;
            iError = ioctl(iFd, VIDIOC_QBUF, &tV4l2Buf);
            if (iError)
            {
                DBG_PRINTF("Unable to queue buffer.\n");
                goto err_exit;
            }
        }
    }
    else if (tV4l2Cap.capabilities & V4L2_CAP_READWRITE)
    {
        g_tV4l2VideoOpr.GetFrame = V4l2GetFrameForReadWrite;
        g_tV4l2VideoOpr.PutFrame = V4l2PutFrameForReadWrite;

        /* read(fd, buf, size) */
        ptVideoDevice->iVideoBufCnt = 1;
        /* Of the formats supported by this program, a pixel needs at most 4 bytes */
        ptVideoDevice->iVideoBufMaxLen = ptVideoDevice->iWidth * ptVideoDevice->iHeight * 4;
        ptVideoDevice->pucVideBuf[0] = malloc(ptVideoDevice->iVideoBufMaxLen);
    }

    return 0;

err_exit:
    close(iFd);
    return -1;
}
The most complex function is the initialization function: this is where user space configures the hardware, and those operations are carried out through the ioctl() system call.
First, open the device:
iFd = open(strDevName, O_RDWR);
if (iFd < 0)
{
    DBG_PRINTF("can not open %s\n", strDevName);
    return -1;
}
ptVideoDevice->iFd = iFd;
Next, query the device capabilities with the VIDIOC_QUERYCAP command:
memset(&tV4l2Cap, 0, sizeof(struct v4l2_capability));
iError = ioctl(iFd, VIDIOC_QUERYCAP, &tV4l2Cap);
if (iError) {
    DBG_PRINTF("Error opening device %s: unable to query device.\n", strDevName);
    goto err_exit;
}

if (!(tV4l2Cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
{
    DBG_PRINTF("%s is not a video capture device\n", strDevName);
    goto err_exit;
}

if (tV4l2Cap.capabilities & V4L2_CAP_STREAMING) {
    DBG_PRINTF("%s supports streaming i/o\n", strDevName);
}

if (tV4l2Cap.capabilities & V4L2_CAP_READWRITE) {
    DBG_PRINTF("%s supports read i/o\n", strDevName);
}
This checks that the device is a video capture device (V4L2_CAP_VIDEO_CAPTURE) and whether it supports the STREAMING and READWRITE I/O modes.
memset(&tFmtDesc, 0, sizeof(tFmtDesc));
tFmtDesc.index = 0;
tFmtDesc.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while ((iError = ioctl(iFd, VIDIOC_ENUM_FMT, &tFmtDesc)) == 0) {
    if (isSupportThisFormat(tFmtDesc.pixelformat))
    {
        ptVideoDevice->iPixelFormat = tFmtDesc.pixelformat;
        break;
    }
    tFmtDesc.index++;
}

if (!ptVideoDevice->iPixelFormat)
{
    DBG_PRINTF("can not support the format of this device\n");
    goto err_exit;
}
Common camera pixel formats are YUYV, MJPEG and RGB565. The application therefore checks whether each enumerated format is one of these; if the driver offers none of them, an error is returned.
static int isSupportThisFormat(int iPixelFormat)
{
    int i;

    for (i = 0; i < sizeof(g_aiSupportedFormats)/sizeof(g_aiSupportedFormats[0]); i++)
    {
        if (g_aiSupportedFormats[i] == iPixelFormat)
            return 1;
    }
    return 0;
}
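The g_aiSupportedFormats array itself is not shown in the original post; given that the three formats named above are the ones handled, it presumably looks like this:

/* Assumed definition: the pixel formats this program can handle */
static int g_aiSupportedFormats[] = {
    V4L2_PIX_FMT_YUYV,
    V4L2_PIX_FMT_MJPEG,
    V4L2_PIX_FMT_RGB565,
};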
Set the format:
/* set format in */
GetDispResolution(&iLcdWidth, &iLcdHeigt, &iLcdBpp);
memset(&tV4l2Fmt, 0, sizeof(struct v4l2_format));
tV4l2Fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2Fmt.fmt.pix.pixelformat = ptVideoDevice->iPixelFormat;
tV4l2Fmt.fmt.pix.width       = iLcdWidth;
tV4l2Fmt.fmt.pix.height      = iLcdHeigt;
tV4l2Fmt.fmt.pix.field       = V4L2_FIELD_ANY;

/* If the driver cannot support some of these parameters (e.g. the resolution),
 * it adjusts them and returns the adjusted values to the application.
 */
iError = ioctl(iFd, VIDIOC_S_FMT, &tV4l2Fmt);
if (iError)
{
    DBG_PRINTF("Unable to set format\n");
    goto err_exit;
}
Request buffers:
/* request buffers */
memset(&tV4l2ReqBuffs, 0, sizeof(struct v4l2_requestbuffers));
tV4l2ReqBuffs.count  = NB_BUFFER;
tV4l2ReqBuffs.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2ReqBuffs.memory = V4L2_MEMORY_MMAP;

iError = ioctl(iFd, VIDIOC_REQBUFS, &tV4l2ReqBuffs);
if (iError)
{
    DBG_PRINTF("Unable to allocate buffers.\n");
    goto err_exit;
}
Then query the parameters of each kernel buffer, map the buffers into user space, and queue them:
ptVideoDevice->iVideoBufCnt = tV4l2ReqBuffs.count;

if (tV4l2Cap.capabilities & V4L2_CAP_STREAMING)
{
    /* map the buffers */
    for (i = 0; i < ptVideoDevice->iVideoBufCnt; i++)
    {
        memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
        tV4l2Buf.index  = i;
        tV4l2Buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        tV4l2Buf.memory = V4L2_MEMORY_MMAP;
        iError = ioctl(iFd, VIDIOC_QUERYBUF, &tV4l2Buf);
        if (iError)
        {
            DBG_PRINTF("Unable to query buffer.\n");
            goto err_exit;
        }

        ptVideoDevice->iVideoBufMaxLen = tV4l2Buf.length;
        ptVideoDevice->pucVideBuf[i] = mmap(0 /* start anywhere */,
                                            tV4l2Buf.length, PROT_READ, MAP_SHARED, iFd,
                                            tV4l2Buf.m.offset);
        if (ptVideoDevice->pucVideBuf[i] == MAP_FAILED)
        {
            DBG_PRINTF("Unable to map buffer\n");
            goto err_exit;
        }
    }

    /* Queue the buffers. */
    for (i = 0; i < ptVideoDevice->iVideoBufCnt; i++)
    {
        memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
        tV4l2Buf.index  = i;
        tV4l2Buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        tV4l2Buf.memory = V4L2_MEMORY_MMAP;
        iError = ioctl(iFd, VIDIOC_QBUF, &tV4l2Buf);
        if (iError)
        {
            DBG_PRINTF("Unable to queue buffer.\n");
            goto err_exit;
        }
    }
}
The point of mmap here is to map the buffers allocated in kernel space directly into user space, which simplifies the later processing.
In streaming mode the flow is: the requested buffers sit on a queue; when data arrives, a buffer is dequeued, and once it has been processed it is put back onto the queue. The following two functions dequeue a buffer and return it to the queue, respectively.
static int V4l2GetFrameForStreaming(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf)
{
    struct pollfd tFds[1];
    int iRet;
    struct v4l2_buffer tV4l2Buf;

    /* poll: wait until the driver has filled a buffer */
    tFds[0].fd     = ptVideoDevice->iFd;
    tFds[0].events = POLLIN;

    iRet = poll(tFds, 1, -1);
    if (iRet <= 0)
    {
        DBG_PRINTF("poll error!\n");
        return -1;
    }

    /* VIDIOC_DQBUF: take the filled buffer off the queue */
    memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
    tV4l2Buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    tV4l2Buf.memory = V4L2_MEMORY_MMAP;
    iRet = ioctl(ptVideoDevice->iFd, VIDIOC_DQBUF, &tV4l2Buf);
    if (iRet < 0)
    {
        DBG_PRINTF("Unable to dequeue buffer.\n");
        return -1;
    }
    ptVideoDevice->iVideoBufCurIndex = tV4l2Buf.index;

    ptVideoBuf->iPixelFormat        = ptVideoDevice->iPixelFormat;
    ptVideoBuf->tPixelDatas.iWidth  = ptVideoDevice->iWidth;
    ptVideoBuf->tPixelDatas.iHeight = ptVideoDevice->iHeight;
    ptVideoBuf->tPixelDatas.iBpp    = (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_YUYV)   ? 16 : \
                                      (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_MJPEG)  ? 0  : \
                                      (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_RGB565) ? 16 : \
                                      0;
    ptVideoBuf->tPixelDatas.iLineBytes    = ptVideoDevice->iWidth * ptVideoBuf->tPixelDatas.iBpp / 8;
    ptVideoBuf->tPixelDatas.iTotalBytes   = tV4l2Buf.bytesused;
    ptVideoBuf->tPixelDatas.aucPixelDatas = ptVideoDevice->pucVideBuf[tV4l2Buf.index];

    return 0;
}
static int V4l2PutFrameForStreaming(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf)
{
    /* VIDIOC_QBUF */
    struct v4l2_buffer tV4l2Buf;
    int iError;

    memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
    tV4l2Buf.index  = ptVideoDevice->iVideoBufCurIndex;
    tV4l2Buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    tV4l2Buf.memory = V4L2_MEMORY_MMAP;
    iError = ioctl(ptVideoDevice->iFd, VIDIOC_QBUF, &tV4l2Buf);
    if (iError)
    {
        DBG_PRINTF("Unable to queue buffer.\n");
        return -1;
    }
    return 0;
}
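To make the cycle described above concrete, here is a minimal sketch of how the two functions cooperate. It is only an illustration: ptVideoDevice is assumed to have been set up by V4l2InitDevice and streaming to have been started with V4l2StartDevice (shown further down).

/* Sketch only: ptVideoDevice is assumed to be an initialized, started device */
T_VideoBuf tVideoBuf;

while (1)
{
    /* waits in poll() until a buffer has been filled, then dequeues it */
    if (V4l2GetFrameForStreaming(ptVideoDevice, &tVideoBuf))
        break;

    /* ... process tVideoBuf.tPixelDatas (convert, display, ...) ... */

    /* return the buffer to the queue so the driver can refill it */
    V4l2PutFrameForStreaming(ptVideoDevice, &tVideoBuf);
}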
Besides streaming mode there is also read/write mode, which is simpler to handle. The corresponding branch of the initialization code is:
else if (tV4l2Cap.capabilities & V4L2_CAP_READWRITE)
{
    g_tV4l2VideoOpr.GetFrame = V4l2GetFrameForReadWrite;
    g_tV4l2VideoOpr.PutFrame = V4l2PutFrameForReadWrite;

    /* read(fd, buf, size) */
    ptVideoDevice->iVideoBufCnt = 1;
    /* Of the formats supported by this program, a pixel needs at most 4 bytes */
    ptVideoDevice->iVideoBufMaxLen = ptVideoDevice->iWidth * ptVideoDevice->iHeight * 4;
    ptVideoDevice->pucVideBuf[0] = malloc(ptVideoDevice->iVideoBufMaxLen);
}
The read-frame function:
static int V4l2GetFrameForReadWrite(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf)
{
    int iRet;

    iRet = read(ptVideoDevice->iFd, ptVideoDevice->pucVideBuf[0], ptVideoDevice->iVideoBufMaxLen);
    if (iRet <= 0)
    {
        return -1;
    }

    ptVideoBuf->iPixelFormat        = ptVideoDevice->iPixelFormat;
    ptVideoBuf->tPixelDatas.iWidth  = ptVideoDevice->iWidth;
    ptVideoBuf->tPixelDatas.iHeight = ptVideoDevice->iHeight;
    ptVideoBuf->tPixelDatas.iBpp    = (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_YUYV)   ? 16 : \
                                      (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_MJPEG)  ? 0  : \
                                      (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_RGB565) ? 16 : \
                                      0;
    ptVideoBuf->tPixelDatas.iLineBytes    = ptVideoDevice->iWidth * ptVideoBuf->tPixelDatas.iBpp / 8;
    ptVideoBuf->tPixelDatas.iTotalBytes   = iRet;
    ptVideoBuf->tPixelDatas.aucPixelDatas = ptVideoDevice->pucVideBuf[0];

    return 0;
}
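The V4l2PutFrameForReadWrite function assigned in the read/write branch is not shown in the original post. Since read() involves no buffer queue there is nothing to give back, so presumably it is a no-op along these lines:

/* Assumed implementation: in read/write mode there is no queue to return the buffer to */
static int V4l2PutFrameForReadWrite(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf)
{
    return 0;
}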
Finally, we need the functions that start and stop the device. Starting the device:
static int V4l2StartDevice(PT_VideoDevice ptVideoDevice)
{
    int iType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int iError;

    iError = ioctl(ptVideoDevice->iFd, VIDIOC_STREAMON, &iType);
    if (iError)
    {
        DBG_PRINTF("Unable to start capture.\n");
        return -1;
    }
    return 0;
}
Stopping the device:
static int V4l2StopDevice(PT_VideoDevice ptVideoDevice)
{
    int iType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int iError;

    iError = ioctl(ptVideoDevice->iFd, VIDIOC_STREAMOFF, &iType);
    if (iError)
    {
        DBG_PRINTF("Unable to stop capture.\n");
        return -1;
    }
    return 0;
}
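V4l2ExitDevice, which the operations structure below points to, is also not listed in the original post. Based on what V4l2InitDevice sets up, it would presumably unmap the buffers and close the device, roughly like this sketch:

/* Assumed implementation: undo what V4l2InitDevice set up */
static int V4l2ExitDevice(PT_VideoDevice ptVideoDevice)
{
    int i;

    for (i = 0; i < ptVideoDevice->iVideoBufCnt; i++)
    {
        if (ptVideoDevice->pucVideBuf[i])
        {
            /* note: in read/write mode the single buffer was malloc'd and would need free() instead */
            munmap(ptVideoDevice->pucVideBuf[i], ptVideoDevice->iVideoBufMaxLen);
            ptVideoDevice->pucVideBuf[i] = NULL;
        }
    }

    close(ptVideoDevice->iFd);
    return 0;
}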
Finally, the operations structure is registered, i.e. added to a linked list.
static T_VideoOpr g_tV4l2VideoOpr = {
    .name        = "v4l2",
    .InitDevice  = V4l2InitDevice,
    .ExitDevice  = V4l2ExitDevice,
    .GetFrame    = V4l2GetFrameForStreaming,
    .PutFrame    = V4l2PutFrameForStreaming,
    .StartDevice = V4l2StartDevice,
    .StopDevice  = V4l2StopDevice,
};

int V4l2Init(void)
{
    return RegisterVideoOpr(&g_tV4l2VideoOpr);
}
Implementation of video_manager.c
These functions need hardly any changes; the same manager pattern used for the display devices can be reused.
#include <config.h>
#include <video_manager.h>
#include <string.h>
#include <stdio.h>

static PT_VideoOpr g_ptVideoOprHead = NULL;

int RegisterVideoOpr(PT_VideoOpr ptVideoOpr)
{
    PT_VideoOpr ptTmp;

    if (!g_ptVideoOprHead)
    {
        g_ptVideoOprHead   = ptVideoOpr;
        ptVideoOpr->ptNext = NULL;
    }
    else
    {
        ptTmp = g_ptVideoOprHead;
        while (ptTmp->ptNext)
        {
            ptTmp = ptTmp->ptNext;
        }
        ptTmp->ptNext      = ptVideoOpr;
        ptVideoOpr->ptNext = NULL;
    }

    return 0;
}

void ShowVideoOpr(void)
{
    int i = 0;
    PT_VideoOpr ptTmp = g_ptVideoOprHead;

    while (ptTmp)
    {
        printf("%02d %s\n", i++, ptTmp->name);
        ptTmp = ptTmp->ptNext;
    }
}

PT_VideoOpr GetVideoOpr(char *pcName)
{
    PT_VideoOpr ptTmp = g_ptVideoOprHead;

    while (ptTmp)
    {
        if (strcmp(ptTmp->name, pcName) == 0)
        {
            return ptTmp;
        }
        ptTmp = ptTmp->ptNext;
    }
    return NULL;
}

int VideoInit(void)
{
    int iError;

    iError = V4l2Init();
    return iError;
}
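A quick sketch of how an application might drive the manager end to end; the device path "/dev/video0" and the minimal error handling are assumptions for illustration, not part of the original code:

/* Hypothetical usage sketch */
int main(int argc, char **argv)
{
    T_VideoDevice tVideoDevice;
    T_VideoBuf tVideoBuf;
    PT_VideoOpr ptVideoOpr;

    VideoInit();                 /* registers the "v4l2" operations */
    ShowVideoOpr();              /* list what has been registered */

    ptVideoOpr = GetVideoOpr("v4l2");
    if (!ptVideoOpr)
        return -1;

    /* zero the device struct so fields like iPixelFormat start out clear */
    memset(&tVideoDevice, 0, sizeof(tVideoDevice));
    if (ptVideoOpr->InitDevice("/dev/video0", &tVideoDevice))
        return -1;
    tVideoDevice.ptOPr = ptVideoOpr;

    ptVideoOpr->StartDevice(&tVideoDevice);

    if (ptVideoOpr->GetFrame(&tVideoDevice, &tVideoBuf) == 0)
    {
        /* ... use tVideoBuf.tPixelDatas here ... */
        ptVideoOpr->PutFrame(&tVideoDevice, &tVideoBuf);
    }

    ptVideoOpr->StopDevice(&tVideoDevice);
    ptVideoOpr->ExitDevice(&tVideoDevice);
    return 0;
}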
Source: CSDN
Author: 不当大白菜
Link: https://blog.csdn.net/weixin_40474683/article/details/103920460