100ASK_IMX6ULL-PRO Digital Photo Frame Extension Project: Camera Support

This article builds on the 100ASK (百问网) embedded Linux projects: the digital photo frame with file browser, and the embedded Linux camera.

Goal: add a camera page that supports live preview and screenshot capture.

Framework Overview

Introduction to V4L2

V4L2 is a set of generic APIs in the Linux operating system for capturing image and video data. Together with a suitable video-capture device and the corresponding driver, applications can use it to acquire still images and video streams.

A camera driven by V4L2 is exposed as a device file, usually /dev/videoX (where X is a number corresponding to a specific device).
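As a quick illustration (not taken from the project code), such a node can be opened and checked for capture capability with VIDIOC_QUERYCAP; the device path and helper name below are only examples:

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Sketch only: open a camera node (e.g. "/dev/video0") and verify that it
 * supports video capture; returns the file descriptor or -1 on failure. */
int OpenCamera(const char *strDevName)
{
    struct v4l2_capability tCap;
    int iFd = open(strDevName, O_RDWR);
    if (iFd < 0)
        return -1;

    memset(&tCap, 0, sizeof(tCap));
    if (ioctl(iFd, VIDIOC_QUERYCAP, &tCap) == 0 &&
        (tCap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        printf("capture device: %s\n", tCap.card);
        return iFd;                     /* caller closes it later */
    }
    close(iFd);
    return -1;
}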

V4L2 video capture steps:

(1) Open the device and do the initial setup: configure the capture window, the capture resolution and the pixel format through the V4L2 interface;

(2) Request frame buffers and memory-map them from kernel space into user space, so the application can read and process the image data;

(3) Queue the frame buffers and start video capture;

(4) The driver starts filling buffers with video data; the application dequeues a filled buffer from the capture output queue, processes it, then puts it back into the capture input queue, repeating this cycle to obtain a continuous video stream;

(5) Stop capture and release the resources.

Since this project needs continuous video capture, the faster memory-mapping (mmap) method is used for V4L2 image acquisition.
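For reference, here is a minimal sketch of steps (2) and (3) with the mmap method, assuming the device has already been opened and its format configured with VIDIOC_S_FMT; the helper name, buffer count and arrays are illustrative assumptions, not the project's actual code:

#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>

#define NB_BUFFER 4     /* assumed buffer count, matching pucVideBuf[NB_BUFFER] used later */

/* Sketch only: request driver buffers, mmap them into user space and queue
 * them, i.e. steps (2) and (3) of the capture flow above. */
static int SetupMmapBuffers(int iFd, void *apBuf[NB_BUFFER], unsigned int auiLen[NB_BUFFER])
{
    struct v4l2_requestbuffers tReq;
    struct v4l2_buffer tBuf;
    unsigned int i;

    memset(&tReq, 0, sizeof(tReq));
    tReq.count  = NB_BUFFER;
    tReq.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    tReq.memory = V4L2_MEMORY_MMAP;
    if (ioctl(iFd, VIDIOC_REQBUFS, &tReq) < 0)
        return -1;

    for (i = 0; i < tReq.count; i++)
    {
        memset(&tBuf, 0, sizeof(tBuf));
        tBuf.index  = i;
        tBuf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        tBuf.memory = V4L2_MEMORY_MMAP;
        if (ioctl(iFd, VIDIOC_QUERYBUF, &tBuf) < 0)
            return -1;

        auiLen[i] = tBuf.length;
        apBuf[i]  = mmap(NULL, tBuf.length, PROT_READ | PROT_WRITE,
                         MAP_SHARED, iFd, tBuf.m.offset);
        if (apBuf[i] == MAP_FAILED)
            return -1;

        /* queue the buffer so the driver can fill it (step 3) */
        if (ioctl(iFd, VIDIOC_QBUF, &tBuf) < 0)
            return -1;
    }
    return 0;
}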

Main Modules

Display Module

  • Format conversion: convert YUV / MJPEG / RGB data into the LCD's RGB format (a conversion sketch follows below)
  • LCD display: unified framebuffer control, with VideoMem page-buffer management for the video pages
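As an example of such a conversion (illustrative only, not the project's convert module), YUYV (YUV 4:2:2) data can be turned into RGB565 with the usual integer approximation of the YUV-to-RGB formulas:

/* Convert one YUV sample to an RGB565 pixel using the integer approximation
 * R = Y + 1.402(V-128), G = Y - 0.344(U-128) - 0.714(V-128), B = Y + 1.772(U-128). */
static unsigned short YuvToRgb565(int y, int u, int v)
{
    int r = y + ((359 * (v - 128)) >> 8);
    int g = y - ((88  * (u - 128) + 183 * (v - 128)) >> 8);
    int b = y + ((454 * (u - 128)) >> 8);

    if (r < 0) r = 0; if (r > 255) r = 255;
    if (g < 0) g = 0; if (g > 255) g = 255;
    if (b < 0) b = 0; if (b > 255) b = 255;

    return (unsigned short)(((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3));
}

/* pYuv: width*height*2 bytes of YUYV data; pRgb: width*height RGB565 pixels.
 * Each 4-byte group Y0 U Y1 V yields two output pixels. */
static void Yuyv2Rgb565(const unsigned char *pYuv, unsigned short *pRgb,
                        int iWidth, int iHeight)
{
    int i;
    for (i = 0; i < iWidth * iHeight / 2; i++)
    {
        int y0 = pYuv[0], u = pYuv[1], y1 = pYuv[2], v = pYuv[3];
        *pRgb++ = YuvToRgb565(y0, u, v);
        *pRgb++ = YuvToRgb565(y1, u, v);
        pYuv += 4;
    }
}

MJPEG frames, by contrast, would first go through a JPEG decoder (for example libjpeg) before being handed to the same RGB path.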

Video Module

  • V4L2 device management: starting/stopping the device and capturing frame data, implemented with the corresponding ioctl commands (see the registration sketch after the structures below).
struct VideoDevice {
    int iFd;                                /* file descriptor of /dev/videoX */
    int iPixelFormat;                       /* pixel format used by the device */
    int iWidth;
    int iHeight;
    int iVideoBufCnt;                       /* number of mmap'ed buffers */
    int iVideoBufMaxLen;                    /* size of each buffer */
    int iVideoBufCurIndex;                  /* index of the buffer currently dequeued */
    unsigned char *pucVideBuf[NB_BUFFER];   /* user-space addresses of the mmap'ed buffers */
    /* operations */
    PT_VideoOpr ptOPr;
};
struct VideoOpr {
    char *name;                             /* name of this implementation, e.g. "v4l2" */
    int (*InitDevice)(char *strDevName, PT_VideoDevice ptVideoDevice);
    int (*ExitDevice)(PT_VideoDevice ptVideoDevice);
    int (*GetFrame)(PT_VideoDevice ptVideoDevice, PT_PixelDatas ptPixelDatas);
    int (*GetFormat)(PT_VideoDevice ptVideoDevice);
    int (*PutFrame)(PT_VideoDevice ptVideoDevice, PT_PixelDatas ptPixelDatas);
    int (*StartDevice)(PT_VideoDevice ptVideoDevice);
    int (*StopDevice)(PT_VideoDevice ptVideoDevice);
    struct VideoOpr *ptNext;                /* next node in the registered-operations list */
};
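The ptNext member suggests that the operation sets are kept in a linked list, in the same register/lookup style used elsewhere in the photo-frame project. Below is a minimal sketch of what that registration could look like; the function names are assumptions, not necessarily the project's actual API:

#include <string.h>

/* Sketch only: keep all VideoOpr implementations in a linked list so the
 * upper layer can register them and look them up by name.
 * PT_VideoOpr is the pointer typedef for struct VideoOpr shown above. */
static PT_VideoOpr g_ptVideoOprHead = NULL;

int RegisterVideoOpr(PT_VideoOpr ptVideoOpr)
{
    PT_VideoOpr ptTmp;

    ptVideoOpr->ptNext = NULL;
    if (!g_ptVideoOprHead)
    {
        g_ptVideoOprHead = ptVideoOpr;
        return 0;
    }
    ptTmp = g_ptVideoOprHead;
    while (ptTmp->ptNext)
        ptTmp = ptTmp->ptNext;
    ptTmp->ptNext = ptVideoOpr;             /* append at the tail */
    return 0;
}

PT_VideoOpr GetVideoOpr(char *pcName)
{
    PT_VideoOpr ptTmp = g_ptVideoOprHead;

    while (ptTmp)
    {
        if (strcmp(ptTmp->name, pcName) == 0)
            return ptTmp;
        ptTmp = ptTmp->ptNext;
    }
    return NULL;
}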

Core Workflow

Implementation Notes

Add a new video playback page, video_page

Opening and closing the camera device

Note that when the device is reopened after being closed, the buffers must be re-initialized, for the following reasons:

  • State loss: when a V4L2 device is closed, the device-related state is cleared, including the buffer state (allocated memory, buffer indexes, fill status, and so on). After reopening the device this state must be re-initialized, so that the buffers can correctly hold new video data.
  • Avoiding stale data and conflicts: without a reset, old or incomplete data left in the buffers when the device was closed could interfere with the new capture, and internal pointers and counters may be inconsistent; reusing them can crash the program or produce corrupted video data.
static int V4l2StartDevice(PT_VideoDevice ptVideoDevice)
{
    int iType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int iError;

    iError = RequstBuffer(ptVideoDevice);
    if (iError)
    {
        DBG_PRINTF("Unable to RequstBuffer.\n");
        return -1;
    }

    iError = ioctl(ptVideoDevice->iFd, VIDIOC_STREAMON, &iType);
    if (iError)
    {
        DBG_PRINTF("Unable to start capture.\n");
        return -1;
    }
    return 0;
}

static int V4l2StopDevice(PT_VideoDevice ptVideoDevice)
{
    int iType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int iError;

    iError = FreeBuffer(ptVideoDevice);
    if (iError)
    {
        DBG_PRINTF("Unable to FreeBuffer.\n");
        return -1;
    }

    iError = ioctl(ptVideoDevice->iFd, VIDIOC_STREAMOFF, &iType);
    if (iError)
    {
        DBG_PRINTF("Unable to stop capture.\n");
        return -1;
    }
    return 0;
}
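Between V4l2StopDevice and the next V4l2StartDevice the mmap'ed buffers are therefore released and re-requested, which is exactly the buffer reset discussed above. Once streaming runs, frames travel between the driver and the application through the queue ioctls; the following is a minimal sketch of what the mmap GetFrame/PutFrame operations might look like (the use of pucVideBuf and iVideoBufCurIndex follows the structures above, the rest is an assumption):

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Sketch only: dequeue one filled frame and hand its mmap'ed data to the
 * caller; PutFrame re-queues that buffer so the driver can reuse it. */
static int V4l2GetFrameForStreaming(PT_VideoDevice ptVideoDevice, PT_PixelDatas ptPixelDatas)
{
    struct v4l2_buffer tBuf;

    memset(&tBuf, 0, sizeof(tBuf));
    tBuf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    tBuf.memory = V4L2_MEMORY_MMAP;
    if (ioctl(ptVideoDevice->iFd, VIDIOC_DQBUF, &tBuf))    /* blocks until a frame is ready */
        return -1;

    ptVideoDevice->iVideoBufCurIndex = tBuf.index;
    ptPixelDatas->iWidth        = ptVideoDevice->iWidth;
    ptPixelDatas->iHeight       = ptVideoDevice->iHeight;
    ptPixelDatas->iTotalBytes   = tBuf.bytesused;
    ptPixelDatas->aucPixelDatas = ptVideoDevice->pucVideBuf[tBuf.index];
    return 0;
}

static int V4l2PutFrameForStreaming(PT_VideoDevice ptVideoDevice, PT_PixelDatas ptPixelDatas)
{
    struct v4l2_buffer tBuf;

    memset(&tBuf, 0, sizeof(tBuf));
    tBuf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    tBuf.memory = V4L2_MEMORY_MMAP;
    tBuf.index  = ptVideoDevice->iVideoBufCurIndex;         /* the buffer returned by GetFrame */
    return ioctl(ptVideoDevice->iFd, VIDIOC_QBUF, &tBuf) ? -1 : 0;
}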
Start a thread that continuously processes and displays the frames
  • Get a frame from the capture queue
  • Convert it to RGB format
  • Scale it and merge it into the playback area of the current page's display buffer
  • Flush the buffer to the display device
static void *VideoPlayThreadFunction(void *pVoid)
{
    int iPixelFormatOfVideo;
    int iPixelFormatOfDisp;
    PT_VideoDevice ptVideoDevice = GetVideoDevice();
    PT_VideoConvert ptVideoConvert;
    PT_PixelDatas ptVideoBufCur;
    T_PixelDatas tVideoBuf;                 /* raw frame from the camera */
    T_PixelDatas tConvertBuf;               /* frame converted to the LCD pixel format */
    T_PixelDatas tZoomBuf;                  /* frame scaled to fit the display area */

    /* initialization */
    PT_VideoMem ptVideoMemCur = GetDevVideoMem(); /* GetCurVideoMem(); */
    int iLcdWidth, iLcdHeigt, iLcdBpp;
    int iWidth  = g_tVideoPicLayout.iBotRightX - g_tVideoPicLayout.iTopLeftX + 1;
    int iHeight = g_tVideoPicLayout.iBotRightY - g_tVideoPicLayout.iTopLeftY + 1;
    int iError;
    int iCpature;
    time_t timep;
    struct tm *tmp;
    char strTimeBuf[64];
    float k;                                /* height/width ratio (float: an int here would truncate to 0) */

    GetDispResolution(&iLcdWidth, &iLcdHeigt, &iLcdBpp);
    iPixelFormatOfDisp = (iLcdBpp == 16) ? V4L2_PIX_FMT_RGB565 : \
                         (iLcdBpp == 32) ? V4L2_PIX_FMT_RGB32 : \
                         0;
    iPixelFormatOfVideo = GetVideoDeviceFormat(NULL);

    ptVideoConvert = GetVideoConvertForFormats(iPixelFormatOfVideo, iPixelFormatOfDisp);
    if (NULL == ptVideoConvert)
    {
        DBG_PRINTF("can not support this format convert\n");
        return NULL;
    }

    memset(&tVideoBuf, 0, sizeof(tVideoBuf));
    memset(&tConvertBuf, 0, sizeof(tConvertBuf));
    tConvertBuf.iBpp = iLcdBpp;
    memset(&tZoomBuf, 0, sizeof(tZoomBuf));

    while (1)
    {
        pthread_mutex_lock(&g_tVideoPlayThreadMutex);
        iError = g_bThreadExit;
        pthread_mutex_unlock(&g_tVideoPlayThreadMutex);
        if (iError)
            break;                          /* leave the loop so the buffers below get freed */

        /* read one camera frame from the queue */
        iError = ptVideoDevice->ptOPr->GetFrame(ptVideoDevice, &tVideoBuf);
        if (iError)
        {
            DBG_PRINTF("Video GetFrame error!\n");
            break;
        }
        ptVideoBufCur = &tVideoBuf;

        /* convert to RGB format */
        if (iPixelFormatOfVideo != iPixelFormatOfDisp)
        {
            iError = ptVideoConvert->Convert(iPixelFormatOfVideo, iPixelFormatOfDisp, &tVideoBuf, &tConvertBuf);
            if (iError)
            {
                DBG_PRINTF("Convert %s error!\n", ptVideoConvert->name);
                break;
            }
            ptVideoBufCur = &tConvertBuf;
        }

        /* if the image is larger than the display area, scale it down */
        if (ptVideoBufCur->iWidth > iWidth || ptVideoBufCur->iHeight > iHeight)
        {
            /* scale while keeping the aspect ratio */
            k = (float)ptVideoBufCur->iHeight / ptVideoBufCur->iWidth;
            tZoomBuf.iWidth  = iWidth;
            tZoomBuf.iHeight = iWidth * k;
            if (tZoomBuf.iHeight > iHeight)
            {
                tZoomBuf.iWidth  = iHeight / k;
                tZoomBuf.iHeight = iHeight;
            }
            tZoomBuf.iBpp        = iLcdBpp;
            tZoomBuf.iLineBytes  = tZoomBuf.iWidth * tZoomBuf.iBpp / 8;
            tZoomBuf.iTotalBytes = tZoomBuf.iLineBytes * tZoomBuf.iHeight;
            if (!tZoomBuf.aucPixelDatas)
            {
                tZoomBuf.aucPixelDatas = malloc(tZoomBuf.iTotalBytes);
            }
            PicZoom(ptVideoBufCur, &tZoomBuf);
            ptVideoBufCur = &tZoomBuf;
        }

        /* merge into the page buffer and flush it to the display device */
        PicMerge(g_tVideoPicLayout.iTopLeftX, g_tVideoPicLayout.iTopLeftY, ptVideoBufCur, &(ptVideoMemCur->tPixelDatas));
        FlushVideoMemToDev(ptVideoMemCur);

        /* screenshot handling */
        pthread_mutex_lock(&g_tVideoCaptureThreadMutex);
        iCpature = g_bThreadCpature;
        g_bThreadCpature = 0;
        pthread_mutex_unlock(&g_tVideoCaptureThreadMutex);
        if (iCpature)
        {
            time(&timep);
            tmp = gmtime(&timep);
            strftime(strTimeBuf, sizeof(strTimeBuf), "picture-%Y%m%d-%H%M%S.bmp", tmp);
            printf("photos name: %s\n", strTimeBuf);
            Parser("bmp")->SaveFile(ptVideoBufCur->aucPixelDatas, ptVideoBufCur->iWidth, ptVideoBufCur->iHeight, ptVideoBufCur->iBpp, strTimeBuf);
        }

        PutVideoMem(ptVideoMemCur);

        /* re-queue the frame buffer as free */
        iError = ptVideoDevice->ptOPr->PutFrame(ptVideoDevice, &tVideoBuf);
        if (iError)
        {
            DBG_PRINTF("Video PutFrame error!\n");
            break;
        }
    }

    if (tConvertBuf.aucPixelDatas)
        free(tConvertBuf.aucPixelDatas);
    if (tZoomBuf.aucPixelDatas)
        free(tZoomBuf.aucPixelDatas);
    return NULL;
}

The thread loops reading frames, processes them and shows them on the display device; the "exit" and "screenshot" requests are passed to it through mutex-protected flags.
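A minimal sketch of how the page code might drive those flags is shown below; the mutexes and flags are the ones checked inside VideoPlayThreadFunction, while the helper names are assumptions for illustration:

#include <pthread.h>

/* Flags/mutexes checked by VideoPlayThreadFunction above
 * (defined in the same source file in the real project). */
extern pthread_mutex_t g_tVideoPlayThreadMutex;
extern pthread_mutex_t g_tVideoCaptureThreadMutex;
extern int g_bThreadExit;
extern int g_bThreadCpature;

static pthread_t g_tVideoPlayThreadID;

/* Hypothetical helper: start the playback thread when entering video_page */
static int StartVideoPlayThread(void)
{
    pthread_mutex_lock(&g_tVideoPlayThreadMutex);
    g_bThreadExit = 0;
    pthread_mutex_unlock(&g_tVideoPlayThreadMutex);
    return pthread_create(&g_tVideoPlayThreadID, NULL, VideoPlayThreadFunction, NULL);
}

/* Hypothetical helper: ask the thread to exit and wait for it */
static void StopVideoPlayThread(void)
{
    pthread_mutex_lock(&g_tVideoPlayThreadMutex);
    g_bThreadExit = 1;                      /* thread notices this on its next loop iteration */
    pthread_mutex_unlock(&g_tVideoPlayThreadMutex);
    pthread_join(g_tVideoPlayThreadID, NULL);
}

/* Hypothetical helper: called from the screenshot button handler */
static void RequestCapture(void)
{
    pthread_mutex_lock(&g_tVideoCaptureThreadMutex);
    g_bThreadCpature = 1;                   /* consumed once and cleared by the thread */
    pthread_mutex_unlock(&g_tVideoCaptureThreadMutex);
}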

Screenshot Feature

When the screenshot button is clicked, the currently captured frame (already converted to RGB) is saved as a BMP file: the BMP headers are filled in according to the format, the pixel data is written out, and the file is named after the current time.

static int SaveRgbAsBMP(unsigned char *pRgb, unsigned int dwWidth, unsigned int dwHeight, unsigned int dwBpp, char *strFilename)
{
    BITMAPFILEHEADER tBmpFileHead;
    BITMAPINFOHEADER tBmpInfoHead;
    unsigned int dwSize;
    unsigned char *pPos = 0;
    FILE *fout;

    memset(&tBmpFileHead, 0, sizeof(BITMAPFILEHEADER));
    memset(&tBmpInfoHead, 0, sizeof(BITMAPINFOHEADER));

    fout = fopen(strFilename, "wb");        /* binary mode */
    if (!fout)
    {
        DBG_PRINTF("Can't create output file %s\n", strFilename);
        return -2;
    }

    /* BMP file header: 'BM' magic, total size, offset to the pixel data */
    tBmpFileHead.bfType    = 0x4d42;
    tBmpFileHead.bfSize    = 0x36 + dwWidth * dwHeight * (dwBpp / 8);
    tBmpFileHead.bfOffBits = 0x00000036;

    /* BMP info header: image geometry and pixel format */
    tBmpInfoHead.biSize          = 0x00000028;
    tBmpInfoHead.biWidth         = dwWidth;
    tBmpInfoHead.biHeight        = dwHeight;
    tBmpInfoHead.biPlanes        = 0x0001;
    tBmpInfoHead.biBitCount      = dwBpp;
    tBmpInfoHead.biCompression   = 0;
    tBmpInfoHead.biSizeImage     = dwWidth * dwHeight * (dwBpp / 8);
    tBmpInfoHead.biXPelsPerMeter = 0;
    tBmpInfoHead.biYPelsPerMeter = 0;
    tBmpInfoHead.biClrUsed       = 0;
    tBmpInfoHead.biClrImportant  = 0;

    if (fwrite(&tBmpFileHead, 1, sizeof(tBmpFileHead), fout) != sizeof(tBmpFileHead))
    {
        DBG_PRINTF("Can't write BMP File Head to %s\n", strFilename);
        fclose(fout);
        return -3;
    }
    if (fwrite(&tBmpInfoHead, 1, sizeof(tBmpInfoHead), fout) != sizeof(tBmpInfoHead))
    {
        DBG_PRINTF("Can't write BMP File Info Head to %s\n", strFilename);
        fclose(fout);
        return -4;
    }

    /* BMP stores rows bottom-up, so write from the last row back to the first */
    dwSize = dwWidth * dwBpp / 8;
    pPos = pRgb + (dwHeight - 1) * dwSize;
    while (pPos >= pRgb)
    {
        if (fwrite(pPos, 1, dwSize, fout) != dwSize)
        {
            DBG_PRINTF("Can't write data to BMP File %s\n", strFilename);
            fclose(fout);
            return -5;
        }
        pPos -= dwSize;
    }

    fclose(fout);
    return 0;
}
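Note that BITMAPFILEHEADER and BITMAPINFOHEADER are Windows BMP structures that Linux headers do not provide; the code above assumes definitions equivalent to the standard BMP layout, packed so that fwrite emits exactly 14 + 40 bytes. A minimal sketch:

#include <stdint.h>

/* Standard BMP headers, packed to avoid compiler padding; field names match
 * the Windows definitions used by SaveRgbAsBMP above. */
typedef struct {
    uint16_t bfType;        /* 'BM' = 0x4d42 */
    uint32_t bfSize;        /* total file size in bytes */
    uint16_t bfReserved1;
    uint16_t bfReserved2;
    uint32_t bfOffBits;     /* offset from file start to the pixel data (0x36) */
} __attribute__((packed)) BITMAPFILEHEADER;

typedef struct {
    uint32_t biSize;        /* size of this header: 40 */
    int32_t  biWidth;
    int32_t  biHeight;      /* positive height means bottom-up row order */
    uint16_t biPlanes;
    uint16_t biBitCount;    /* bits per pixel */
    uint32_t biCompression;
    uint32_t biSizeImage;
    int32_t  biXPelsPerMeter;
    int32_t  biYPelsPerMeter;
    uint32_t biClrUsed;
    uint32_t biClrImportant;
} __attribute__((packed)) BITMAPINFOHEADER;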