在rk3399(rockpi 4b)上连接gc2145(自带isp的相机模组)(三)
前两章叙述了准备以及内核部分的全部操作。
接下来进行应用层部分的操作:
应用层主要基于 v4l2 框架 + libyuv + opencv 完成。
cam.c基于v4l2
#include <stdio.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <errno.h>
#include <fcntl.h>
#include <string.h>
#include <stdlib.h>
//#include <iostream>
#include <linux/videodev2.h>
#define BUFFER_COUNT 2
#define FMT_NUM_PLANES 1
/* fd of the V4L2 capture device opened by v4l2init() (/dev/video0) */
static int v4l2_fd;
/* capture type chosen by v4l2init(): single-planar or multi-planar */
static enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
/* bookkeeping for one mmap'ed kernel capture buffer */
struct buffer
{
void *start;   /* userspace mapping of the driver buffer */
size_t length; /* mapped length in bytes */
struct v4l2_buffer v4l2_buf; /* NOTE(review): never read/written in this file */
};
struct buffer *buffers;        /* array of BUFFER_COUNT mapped buffers (init_mamp) */
static unsigned int n_buffers; /* number of valid entries in buffers[] */
/*
 * ioctl() wrapper that retries while the call is interrupted by a
 * signal (EINTR) - common when blocking on V4L2 ioctls.
 *
 * fd      - open device file descriptor
 * request - ioctl request code (VIDIOC_*). Fix: declared as
 *           unsigned long to match the ioctl(2) prototype; V4L2
 *           codes built with _IOR/_IOWR have the high bit set and
 *           do not fit a signed int cleanly.
 * arg     - request-specific argument
 *
 * Returns the ioctl() result: >= 0 on success, -1 on failure with
 * errno set.
 */
static int xioctl(int fd, unsigned long request, void *arg)
{
    int r;

    do
    {
        r = ioctl(fd, request, arg);
    } while (-1 == r && EINTR == errno);
    return r;
}
int v4l2init(int w, int h, __u32 pixelformat)
{
int rc;
struct v4l2_format fmt;
struct v4l2_requestbuffers req;
struct v4l2_buffer buf;
enum v4l2_buf_type type;
v4l2_fd = open("/dev/video0", O_RDWR);
if (v4l2_fd < 0)
{
fprintf(stderr, "open = %d, errno = %d \r\n", v4l2_fd, errno);
return -1;
}
struct v4l2_capability cap;
if (xioctl(v4l2_fd, VIDIOC_QUERYCAP, &cap) == -1)
{
printf("Error opening device : unable to query device.\r\n");
return -1;
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE))
{
printf("this is not a video capture device, capabilities: %x \r\n", cap.capabilities);
return -1;
}
if (!(cap.capabilities & V4L2_CAP_STREAMING))
{
printf(" does not support streaming i/o \r\n");
return -1;
}
if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)
buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
printf("capabilities is %d \r\n", buf_type);
memset(&fmt, 0, sizeof(fmt));
fmt.type = buf_type;
fmt.fmt.pix.width = w;
fmt.fmt.pix.height = h;
fmt.fmt.pix.pixelformat = pixelformat;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
rc = xioctl(v4l2_fd, VIDIOC_S_FMT, &fmt);
if (rc < 0)
{
fprintf(stderr, "VIDIOC_S_FMT: errno = %d \r\n", errno);
return -1;
}
return 0;
}
/*
 * Request BUFFER_COUNT driver buffers, mmap() each into userspace,
 * queue them all, and start streaming.
 *
 * Must be called after v4l2init(). Populates the global buffers[]
 * array and n_buffers.
 *
 * Returns 0 on success, -1 on error. Fixes vs. the original:
 * calloc() failure and mmap() failure now abort with -1 instead of
 * continuing with a NULL array / MAP_FAILED pointer.
 */
int init_mamp()
{
    struct v4l2_requestbuffers req;
    struct v4l2_buffer buf;
    struct v4l2_plane planes[FMT_NUM_PLANES];
    enum v4l2_buf_type type;

    memset(&req, 0, sizeof(req));
    req.count = BUFFER_COUNT;
    req.type = buf_type;
    req.memory = V4L2_MEMORY_MMAP;
    if (-1 == xioctl(v4l2_fd, VIDIOC_REQBUFS, &req))
    {
        fprintf(stderr, "VIDIOC_REQBUFS: errno = %d \r\n", errno);
        return -1;
    }
    /* the driver may grant fewer buffers than requested */
    if (req.count < BUFFER_COUNT)
    {
        printf("Insufficient buffer memory \r\n");
        return -1;
    }
    buffers = (struct buffer *)calloc(req.count, sizeof(*buffers));
    if (!buffers)
    {
        /* fix: the original only printed and then dereferenced NULL */
        printf("Out of memory\n");
        return -1;
    }
    for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
    {
        memset(&buf, 0, sizeof(buf));
        memset(&planes, 0, sizeof(planes));
        buf.type = buf_type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == buf_type)
        {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }
        if (-1 == xioctl(v4l2_fd, VIDIOC_QUERYBUF, &buf))
        {
            fprintf(stderr, "VIDIOC_QUERYBUF: errno = %d \r\n", errno);
            return -1;
        }
        /* map the driver buffer into our address space */
        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == buf_type)
        {
            buffers[n_buffers].length = buf.m.planes[0].length;
            buffers[n_buffers].start =
                mmap(NULL /* start anywhere */,
                     buf.m.planes[0].length,
                     PROT_READ | PROT_WRITE /* required */,
                     MAP_SHARED /* recommended */,
                     v4l2_fd, buf.m.planes[0].m.mem_offset);
        }
        else
        {
            buffers[n_buffers].length = buf.length;
            buffers[n_buffers].start =
                mmap(NULL /* start anywhere */,
                     buf.length,
                     PROT_READ | PROT_WRITE /* required */,
                     MAP_SHARED /* recommended */,
                     v4l2_fd, buf.m.offset);
        }
        if (MAP_FAILED == buffers[n_buffers].start)
        {
            /* fix: the original printed and kept going with an
             * invalid mapping */
            fprintf(stderr, "mmap: errno = %d \r\n", errno);
            return -1;
        }
        /* hand the empty buffer to the driver so it can be filled */
        memset(&buf, 0, sizeof(buf));
        buf.type = buf_type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == buf_type)
        {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }
        if (-1 == xioctl(v4l2_fd, VIDIOC_QBUF, &buf))
        {
            fprintf(stderr, "VIDIOC_QBUF: errno = %d \r\n", errno);
            return -1;
        }
    }
    type = buf_type;
    if (-1 == xioctl(v4l2_fd, VIDIOC_STREAMON, &type))
    {
        fprintf(stderr, "VIDIOC_STREAMON: errno = %d \r\n", errno);
        return -1;
    }
    return 0;
}
int v4l2end(void)
{
int rc;
enum v4l2_buf_type type;
type = buf_type;
rc = xioctl(v4l2_fd, VIDIOC_STREAMOFF, &type);
if (rc < 0)
{
fprintf(stderr, "VIDIOC_STREAMOFF: errno = %d\n", errno);
return -1;
}
close(v4l2_fd);
return 0;
}
/*
 * Dequeue one filled frame from the driver.
 *
 * On success *frame points at the mmap'ed frame data, *bytesize is
 * the number of valid bytes, and the buffer index (>= 0) is
 * returned; the caller must return it via v4l2release() when done.
 * Returns -1 on error.
 *
 * Fixes vs. the original: planes[] was declared inside the if block
 * and went out of scope before VIDIOC_DQBUF wrote through
 * buf.m.planes (dangling-pointer UB) - it is now function-scoped;
 * and the function fell off the end without a return when
 * buf.index >= BUFFER_COUNT (UB) - that is now an explicit error.
 */
int v4l2grab(unsigned char **frame, int *bytesize)
{
    int rc;
    int bytesused;
    struct v4l2_buffer buf;
    struct v4l2_plane planes[FMT_NUM_PLANES];

    memset(&buf, 0, sizeof(buf));
    buf.memory = V4L2_MEMORY_MMAP;
    buf.type = buf_type;
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == buf_type)
    {
        memset(&planes, 0, sizeof(planes));
        buf.m.planes = planes;
        buf.length = FMT_NUM_PLANES;
    }
    rc = xioctl(v4l2_fd, VIDIOC_DQBUF, &buf);
    if (rc < 0)
    {
        fprintf(stderr, "VIDIOC_DQBUF: errno = %d\n", errno);
        return -1;
    }
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == buf_type)
        bytesused = buf.m.planes[0].bytesused;
    else
        bytesused = buf.bytesused;
    if (buf.index >= BUFFER_COUNT)
    {
        fprintf(stderr, "VIDIOC_DQBUF: bad index %d\n", buf.index);
        return -1;
    }
    *frame = (unsigned char *)buffers[buf.index].start;
    *bytesize = bytesused;
    return buf.index;
}
/*
 * Re-queue a previously grabbed buffer so the driver can refill it.
 *
 * buf_idx - the index returned by v4l2grab()
 *
 * Returns 0 on success, -1 on a bad index or ioctl failure.
 * Fix: an out-of-range (or negative) index was silently reported as
 * success in the original; it is now an explicit error.
 */
int v4l2release(int buf_idx)
{
    int rc;
    struct v4l2_buffer buf;
    struct v4l2_plane planes[FMT_NUM_PLANES];

    if (buf_idx < 0 || buf_idx >= BUFFER_COUNT)
    {
        fprintf(stderr, "v4l2release: bad index %d\n", buf_idx);
        return -1;
    }
    memset(&buf, 0, sizeof(buf));
    memset(&planes, 0, sizeof(planes));
    buf.memory = V4L2_MEMORY_MMAP;
    buf.type = buf_type;
    buf.index = buf_idx;
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == buf_type)
    {
        buf.m.planes = planes;
        buf.length = FMT_NUM_PLANES;
    }
    rc = xioctl(v4l2_fd, VIDIOC_QBUF, &buf);
    if (rc < 0)
    {
        fprintf(stderr, "VIDIOC_QBUF: errno = %d\n", errno);
        return -1;
    }
    return 0;
}
yuvvideo.cc 基于 libyuv + opencv 3.4.1(文件名与下面 Makefile 中的 yuvvideo.cc 保持一致)
#include <stdio.h>
#include <unistd.h>
#include <linux/videodev2.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "cam.h"
#include "libyuv.h"
#define WIDTH 800
#define HEIGHT 600
/**
 * Convert one I420 (a.k.a. YUV420P) frame to packed RGB24 via libyuv.
 *
 * Plane layout inside yuvData: a width*height Y plane, followed by a
 * quarter-size U plane, then a quarter-size V plane. Strides are
 * width (Y) and width/2 (U/V) — assumes width and height are even;
 * TODO confirm for the configured camera mode.
 *
 * @param yuvData input I420 frame, width*height*3/2 bytes
 * @param rgb24   output buffer, width*height*3 bytes
 * @param width   frame width in pixels
 * @param height  frame height in pixels
 */
void I420_TO_RGB24(unsigned char *yuvData, unsigned char *rgb24, int width, int height) {
    const int luma_size = width * height;
    unsigned char *y_plane = yuvData;
    unsigned char *u_plane = yuvData + luma_size;
    unsigned char *v_plane = yuvData + width * height * 5 / 4;

    libyuv::I420ToRGB24(y_plane, width,
                        u_plane, width / 2,
                        v_plane, width / 2,
                        rgb24, width * 3,
                        width, height);
}
/*
 * Capture loop: grab I420 frames from the V4L2 device, convert to
 * RGB24 with libyuv, and display them with OpenCV until 'q' is
 * pressed. Returns 0 on success, -1 on any init/capture error.
 *
 * Fixes vs. the original: the ~1.4 MB conversion buffer is static
 * instead of living on the stack; error paths inside the loop now
 * stop streaming via v4l2end() instead of exiting with the device
 * still open; the cv::Mat wraps rgb_buf once rather than re-pointing
 * img.data every iteration.
 */
int main()
{
    int rc, key;
    int size;
    unsigned char *frame;
    static unsigned char rgb_buf[WIDTH * HEIGHT * 3];
    int w = WIDTH, h = HEIGHT;

    rc = v4l2init(w, h, V4L2_PIX_FMT_YUV420);
    if (rc < 0)
    {
        fprintf(stderr, "v4l2init = %d\n", rc);
        return -1;
    }
    rc = init_mamp();
    if (rc < 0)
    {
        fprintf(stderr, "init_mamp = %d\n", rc);
        return -1;
    }
    /* Mat shares rgb_buf's storage; no per-frame copy is made */
    cv::Mat img(h, w, CV_8UC3, rgb_buf);
    const char *window = "camera";
    cv::namedWindow(window);
    while (1)
    {
        rc = v4l2grab(&frame, &size);
        if (rc < 0)
        {
            fprintf(stderr, "v4l2grab = %d\n", rc);
            v4l2end();
            return -1;
        }
        I420_TO_RGB24(frame, rgb_buf, w, h);
        /* return the driver buffer as soon as the pixels are copied */
        rc = v4l2release(rc);
        if (rc < 0)
        {
            fprintf(stderr, "v4l2release = %d\n", rc);
            v4l2end();
            return -1;
        }
        cv::imshow(window, img);
        key = cv::waitKey(1);
        if (key == 'q')
        {
            break;
        }
    }
    rc = v4l2end();
    if (rc < 0)
    {
        fprintf(stderr, "v4l2end = %d\n", rc);
        return -1;
    }
    return 0;
}
Makefile
# Paths to the locally-built OpenCV 3.4.1 and libyuv source trees.
OPENCV_DIR=/xxx/opencv-3.4.1
LIBYUV_DIR=/xxxx/libyuv
INCLUDE=-I${OPENCV_DIR}/build -I${OPENCV_DIR}/modules/core/include \
	-I${OPENCV_DIR}/modules/highgui/include -I${OPENCV_DIR}/modules/imgproc/include \
	-I${OPENCV_DIR}/modules/imgcodecs/include -I${OPENCV_DIR}/modules/videoio/include \
	-I${LIBYUV_DIR}/include
# Fixes: dropped the stray leading "/" before $(LIBYUV_DIR) paths and
# link opencv_imgproc, whose headers yuvvideo.cc includes.
LIB=-L${OPENCV_DIR}/build/lib -lstdc++ -lopencv_core -lopencv_imgproc -lopencv_imgcodecs -lopencv_highgui \
	-L${LIBYUV_DIR}/build -lyuv

all: yuvvideo

yuvvideo: yuvvideo.cc cam.c
	g++ -g -o $@ $^ ${INCLUDE} ${LIB}

.PHONY: all clean
clean:
	rm -f yuvvideo
libyuv 和 opencv 都是网上很容易下载到的资料,就不赘述了。
效果图