Allwinner T507 + TP2815 camera capture demo app: converting NV21 to RGB with FFmpeg 5

 

 

// The camera driver outputs frames in NV21 format. This demo uses FFmpeg 5 (libswscale) to convert NV21 to RGB and writes the result to the framebuffer.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <pthread.h>
#include <unistd.h>
#include <signal.h>
#include <sys/select.h>

#define VIDEO_DEV "/dev/video0"


#define mydebugAddr(msg) printf("%s: %s:line %d %s:%p\n", __FILE__, __func__, __LINE__,#msg,msg);
#define mydebug printf("[%s %s] %s: %s: %d\n", __DATE__, __TIME__, __FILE__, __func__, __LINE__); 

#include <time.h>
#include <sys/time.h>
#define mydebugUs do{struct timeval tv_now; gettimeofday(&tv_now, NULL); uint64_t now_us = ((uint64_t)tv_now.tv_sec * 1000000 + tv_now.tv_usec); printf("[%llu] %s: %s: %d\n", (unsigned long long)now_us, __FILE__, __func__, __LINE__);} while(0)

pthread_mutex_t fastmutex = PTHREAD_MUTEX_INITIALIZER; // mutex protecting raw_buff
pthread_cond_t cond = PTHREAD_COND_INITIALIZER;        // condition variable: a new frame is ready
typedef enum
{
    false=0,
    true,
}bool;
int width=854;
int height=480;
int cameraWidth=1920;
int cameraHeight=1080;
int size;
unsigned char *raw_buff=NULL;
volatile unsigned char video_flag=1; // set to 2 by the capture thread once streaming has started
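/* For reference: an NV21 frame is width*height luma bytes plus width*height/2 interleaved
   VU bytes, so at the 1920x1080 default the driver nominally reports
   size = 1920*1080*3/2 = 3110400 bytes per buffer (alignment padding may add to this). */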




#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include <libavdevice/avdevice.h>
#include <libavutil/imgutils.h>





#include <linux/fb.h>
#include <linux/input.h>
#include <linux/version.h>
#include <getopt.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h>
#include <sunxi_camera_v2.h>
#include <linux/videodev2.h>
#include <sunxiMemInterface.h>

typedef struct {
    void *start;
    unsigned int phy;
    int length;
    int fd;
    dma_mem_des_t mG2dMem;
}dma_buffer_t;
dma_buffer_t *dma_buffers;
int fd;
#define         DMA_BUFFER_NUM      (6)
#define CLEAR(x) (memset(&(x), 0, sizeof(x)))
static void thread_cleanup(void *arg) {
    int i = 0;
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ioctl(fd, VIDIOC_STREAMOFF, &type);
    usleep(20 * 1000);
    for (i = 0; i < DMA_BUFFER_NUM; i++) {
        allocFree(MEM_TYPE_DMA, &dma_buffers[i].mG2dMem, NULL);
    }
    for(i = 0; i < DMA_BUFFER_NUM; i++) {
        allocClose(MEM_TYPE_DMA, &dma_buffers[i].mG2dMem, (void *)NULL);
    }
    close(fd);
    free(dma_buffers);
}

void *Video_CollectImage_normal_V4L2_MEMORY_USERPTR(void *arg)
{
    int i  = 0, ret = 0;

    struct v4l2_input inp;
    struct v4l2_streamparm parms;
    struct v4l2_format fmt;
    struct v4l2_requestbuffers req;
    struct v4l2_exportbuffer exp;
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

    dma_buffers = (dma_buffer_t *)malloc(sizeof(dma_buffer_t) * DMA_BUFFER_NUM);
    if (dma_buffers == NULL) {
        printf("malloc dma_buffers failed\n");
        pthread_exit((void *)-1);
    }
    memset(dma_buffers, 0x00, sizeof(dma_buffer_t) * DMA_BUFFER_NUM);

    pthread_setcancelstate(PTHREAD_CANCEL_ENABLE, NULL);
    pthread_setcanceltype(PTHREAD_CANCEL_ASYNCHRONOUS, NULL);

    pthread_cleanup_push(thread_cleanup, NULL);

    for (i = 0; i < DMA_BUFFER_NUM; i++) {
        int nRet = allocOpen(MEM_TYPE_DMA, &dma_buffers[i].mG2dMem, NULL);
        if (nRet < 0) {
            printf("ion_alloc_open failed\n");
            return (void *)-1;
        }
    }

    fd = open(VIDEO_DEV, O_RDWR /* required */  | O_NONBLOCK, 0);
    if (fd < 0) {
        printf("open %s falied\n", VIDEO_DEV);
        goto err;
    }
    printf("video devname:%s\n", VIDEO_DEV);

    inp.index = 0;
    if (-1 == ioctl(fd, VIDIOC_S_INPUT, &inp)) {
        printf("VIDIOC_S_INPUT 0 error!\n");
        goto err_video_fd; 
    } 

    CLEAR(parms);
    parms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    parms.parm.capture.timeperframe.numerator = 1;
    parms.parm.capture.timeperframe.denominator = 25;
    parms.parm.capture.capturemode = V4L2_MODE_VIDEO;
    parms.parm.capture.reserved[0] = 0;
    parms.parm.capture.reserved[1] = 0;/*2:command, 1: wdr, 0: normal*/
    if (-1 == ioctl(fd, VIDIOC_S_PARM, &parms)) {
        printf("VIDIOC_S_PARM error\n");
        goto err_video_fd;
    }

    int nplanes=0;
    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    fmt.fmt.pix_mp.width = cameraWidth;
    fmt.fmt.pix_mp.height = cameraHeight;
    fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV21; // YUV420SP
    fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
    if (-1 == ioctl(fd, VIDIOC_S_FMT, &fmt)) {
        printf("VIDIOC_S_FMT error!\n");
        goto err_video_fd;
    }

    if (-1 == ioctl(fd, VIDIOC_G_FMT, &fmt)) {
        printf("VIDIOC_G_FMT error!\n");
        goto err_video_fd;
    } else {
        nplanes = fmt.fmt.pix_mp.num_planes;
        printf("resolution got from sensor = %d*%d num_planes = %d\n",
                fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height,
                fmt.fmt.pix_mp.num_planes);
    }

    CLEAR(req);
    req.count = DMA_BUFFER_NUM;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    req.memory = V4L2_MEMORY_USERPTR;
    if (-1 == ioctl(fd, VIDIOC_REQBUFS, &req)) {
        printf("VIDIOC_REQBUFS error\n");
        goto err_video_fd;
    }

    for (i = 0; i < req.count; ++i) {
        struct v4l2_buffer buf;

        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buf.memory = V4L2_MEMORY_USERPTR;
        buf.index = i;
        buf.length = nplanes;
        buf.m.planes = (struct v4l2_plane *)calloc(nplanes, sizeof(struct v4l2_plane));
        if (buf.m.planes == NULL) {
            printf("buf.m.planes calloc failed!\n");
            goto err_video_fd;
        }
        if (-1 == ioctl(fd, VIDIOC_QUERYBUF, &buf)) {
            printf("VIDIOC_QUERYBUF error\n");
            free(buf.m.planes);
            goto err_video_fd;
        }

        switch (nplanes) {
            case 1:{
                //sensor_dma_buffer_alloc(&dma_buffers[i],buf.m.planes[0].length );
                dma_buffer_t *dma = &dma_buffers[i];
                size = buf.m.planes[0].length; /* full NV21 frame size reported by the driver */

                dma->mG2dMem.size = size;
                int nRet = allocAlloc(MEM_TYPE_DMA, &dma->mG2dMem, NULL);
                if (nRet < 0)
                {
                    printf("allocAlloc buf error\n");
                    free(buf.m.planes);
                    goto err_video_fd;
                }
                dma->length = size;
                dma->phy = dma->mG2dMem.phy;
                dma->start = (void *)dma->mG2dMem.vir;
                dma->fd = dma->mG2dMem.ion_buffer.fd_data.aw_fd;

                buf.m.planes[0].m.userptr = (unsigned long)(dma_buffers[i].start);
            } break;
            default:
                break;
        }

        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buf.memory = V4L2_MEMORY_USERPTR;

        if (-1 == ioctl(fd, VIDIOC_QBUF, &buf)) {
            perror("VIDIOC_QBUF error: ");
            free(buf.m.planes);
            goto err_video_fd;
        }

        free(buf.m.planes);
    }
    
    printf("width:%d\theight:%d\tsize:%d\n",width,height,size);


    if (-1 == ioctl(fd, VIDIOC_STREAMON, &type)) {
        printf("VIDIOC_STREAMON failed\n");
        goto err_streamon;
    } 
    else{
        printf("VIDIOC_STREAMON ok\n");
    }
    raw_buff = (unsigned char *)malloc(size);
    if (raw_buff == NULL) {
        printf("malloc raw_buff failed\n");
        goto err_stream_off;
    }
    video_flag = 2; /* tell the main thread that capture is running and `size` is valid */
    
    while (1) {
        fd_set fds;
        struct timeval tv;

        tv.tv_sec = 2; /* Timeout. */
        tv.tv_usec = 0;

        FD_ZERO(&fds);
        FD_SET(fd, &fds);
        if (select(fd + 1, &fds, NULL, NULL, &tv) <= 0) {
            printf("timeout\n");
            continue;
        }

        struct v4l2_buffer buf;
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buf.memory = V4L2_MEMORY_USERPTR;
        buf.length = nplanes;
        buf.m.planes = (struct v4l2_plane *)calloc(nplanes, sizeof(struct v4l2_plane));

        if (-1 == ioctl(fd, VIDIOC_DQBUF, &buf)) {
            free(buf.m.planes);
            printf("VIDIOC_DQBUF failed\n");
            goto err_stream_off;
        }

        //printf("\n get a frame\n");
        pthread_mutex_lock(&fastmutex);   // lock
        memcpy(raw_buff, dma_buffers[buf.index].start, size);
        pthread_cond_broadcast(&cond);    // wake up every thread waiting for a frame
        pthread_mutex_unlock(&fastmutex); // unlock

        
        // pthread_mutex_lock(&fastmutex);
        // disp_set_addr(width, height, dma_buffers[buf.index].phy, &instance->hdmi_layer_dev);
        // pthread_mutex_unlock(&fastmutex);

        if (-1 == ioctl(fd, VIDIOC_QBUF, &buf)) {
            printf("VIDIOC_QBUF buf.index %d failed\n", buf.index);
            free(buf.m.planes);
            goto err_stream_off;
        }

        free(buf.m.planes);
    }


//    return (void *)0;

err_stream_off:
    ioctl(fd, VIDIOC_STREAMOFF, &type);

err_streamon:
    for (i = 0; i < DMA_BUFFER_NUM; i++) {
        allocFree(MEM_TYPE_DMA, &dma_buffers[i].mG2dMem, NULL);
    }

err_video_fd:
    close(fd);

err:
    for(i = 0; i < DMA_BUFFER_NUM; i++) {
        allocClose(MEM_TYPE_DMA, &dma_buffers[i].mG2dMem, (void *)NULL);
    }

    pthread_cleanup_pop(0);
    pthread_exit((void *)0);
}





//Display on the screen (framebuffer)
#include <unistd.h>
#include <fcntl.h>
#include <linux/fb.h>
#include <linux/kd.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <stdlib.h>
#include <stdio.h>
 
static int frame_fd=-1;
void* framebuffer=NULL;
static int screensize=0;
int framex=0;
 
void framebuffer_init(void){ 
    struct fb_var_screeninfo vinfo;
    struct fb_fix_screeninfo finfo;
    frame_fd = open("/dev/fb0" , O_RDWR);  
    if(frame_fd==-1)  
        perror("open frame buffer fail"),exit(-1);
 
    // Get fixed screen information
    if (ioctl(frame_fd, FBIOGET_FSCREENINFO, &finfo))
        printf("Error reading fixed information.\n"),exit(-1);
   
     // Get variable screen information
    if (ioctl(frame_fd, FBIOGET_VSCREENINFO, &vinfo)) 
        printf("Error reading variable information.\n"),exit(-1);
    // Size the mapping from the virtual resolution (xres_virtual/yres_virtual are the virtual
    // framebuffer dimensions, at least as large as the visible ones; bits_per_pixel is the color depth).
    screensize = vinfo.xres_virtual * vinfo.yres_virtual * vinfo.bits_per_pixel / 8;
    framex = vinfo.xres_virtual;
    // Actual bits per pixel; framebuffer_write() below assumes a 32 bpp layout.
    int framebpp = vinfo.bits_per_pixel;
    printf("%dx%d, %d bpp  screensize is %d\n", vinfo.xres_virtual, vinfo.yres_virtual, framebpp, screensize);

    // mmap the framebuffer so userspace can write pixels directly.
    framebuffer = mmap(0, screensize, PROT_READ | PROT_WRITE, MAP_SHARED, frame_fd, 0);
    if (framebuffer == MAP_FAILED)
        perror("memory map fail"), exit(-1);
    memset(framebuffer, 0x00, screensize);
    usleep(500 * 1000);
    memset(framebuffer, 0xff, screensize);
}
//Format conversion: V4L2_PIX_FMT_NV21 (YUV420SP) to RGB24 (software reference; main() uses the FFmpeg path instead)
void NV21_T_RGB(unsigned int width , unsigned int height , unsigned char *yuyv , unsigned char *rgb)
{
    const int nv_start = width * height ;
    uint32_t  i, j, index = 0, rgb_index = 0;
    uint8_t y, u, v;
    int r, g, b, nv_index = 0;
 
    for(i = 0; i < height; i++){
        for(j = 0; j < width; j ++){
            //nv_index = (rgb_index / 2 - width / 2 * ((i + 1) / 2)) * 2;
            nv_index = i / 2  * width + j - j % 2;
 
            y = yuyv[rgb_index] & 0xff;
            // NV21 stores chroma as V,U,V,U... after the Y plane; this code reads the first
            // byte as u and the second as v, so swap u and v here if the colors look wrong.
            u = yuyv[nv_start + nv_index] & 0xff;
            v = yuyv[nv_start + nv_index + 1] & 0xff;
 
            // Fixed-point YUV -> RGB: the >>8 divides by 256, so 351/256, 179/256,
            // 86/256 and 443/256 approximate the usual conversion coefficients.
            r = y + ((351 * (v - 128)) >> 8);  //r
            g = y - ((179 * (v - 128) + 86 * (u - 128)) >> 8); //g
            b = y + ((443 * (u - 128)) >> 8); //b
 
            if(r > 255)   r = 255;
            if(g > 255)   g = 255;
            if(b > 255)   b = 255;
            if(r < 0)     r = 0;
            if(g < 0)     g = 0;
            if(b < 0)     b = 0;
 
            index = rgb_index % width + (height - i - 1) * width;
            //rgb[index * 3+0] = b;
            //rgb[index * 3+1] = g;
            //rgb[index * 3+2] = r;
 
            //flipped image
            //rgb[height * width * 3 - i * width * 3 - 3 * j - 1] = b;
            //rgb[height * width * 3 - i * width * 3 - 3 * j - 2] = g;
            //rgb[height * width * 3 - i * width * 3 - 3 * j - 3] = r;
 
            //upright image
            rgb[i * width * 3 + 3 * j + 0] = b;
            rgb[i * width * 3 + 3 * j + 1] = g;
            rgb[i * width * 3 + 3 * j + 2] = r;
 
            rgb_index++;
        }
    }
} 
void framebuffer_close(void){
    if(frame_fd==-1)
        perror("framebuffer was never opened"),exit(-1);
    munmap(framebuffer,screensize);
    close(frame_fd);
}
typedef struct{
    unsigned char r;
    unsigned char g;
    unsigned char b;
    unsigned char rgbReserved;
}rgb32_frame;
 
typedef struct{
    unsigned char r;
    unsigned char g;
    unsigned char b;
}rgb24;
 
//Write an image into the framebuffer. img_buf: source pixels; img_width/img_height: source size; img_bits: source bits per pixel
void framebuffer_write(void *img_buf, unsigned int img_width, unsigned int img_height, unsigned int img_bits){   
    int row, column;
    int num = 0;        //index of the current pixel in img_buf
    rgb32_frame *rgb32_fbp = (rgb32_frame *)framebuffer;
        
    
    //Guard against the source being larger than the framebuffer
    // if(screensize < img_width * img_height * img_bits / 8){
    //     printf("the imgsize is too large\n"),exit(-1);
    // }
    /*Pick a copy strategy based on the source bit depth*/
    switch (img_bits){
        case 24:{
            rgb24 *rgb24_img_buf = (rgb24 *)img_buf;
            for(row = 0; row < img_height; row++){
                for(column = 0; column < img_width; column++){
                    //The source is narrower than the framebuffer, so wrap rows manually:
                    //the framebuffer is linear, and row * framex (xres_virtual) jumps to the next line.
                    rgb32_fbp[row * framex + column].r = rgb24_img_buf[num].b;
                    rgb32_fbp[row * framex + column].g = rgb24_img_buf[num].g;
                    rgb32_fbp[row * framex + column].b = rgb24_img_buf[num].r;
                    rgb32_fbp[row * framex + column].rgbReserved=0xff;
                    num++;
                }        
            }
        } break;
        case 32:{
            for(row = 0; row < img_height; row++){
                memcpy((unsigned char *)framebuffer + row * framex * 4,
                       (unsigned char *)img_buf + row * img_width * 4, img_width * 4);
            }
        } break;
        
        default:
            break;
    }
}


//Greatest common divisor via Stein's algorithm: combines division, subtraction and bit shifts
int GetGCDStein(int x, int y)
{
    if (x < y)
    {
        int tmp = x;
        x = y;
        y = tmp;
    }
    if ( x%y == 0)
    {
        return y;
    }
    if (x % 2 == 0 && y % 2 == 0)
    {
        return 2*GetGCDStein(x >> 1, y >> 1);
    }
    else if (x%2 == 0 && y%2 != 0)
    {
        return GetGCDStein(x >> 1, y);
    }
    else if (x % 2 != 0 && y % 2 == 0)
    {
        return GetGCDStein(x, y >> 1);
    }
    else /* both x and y odd */
    {
        return GetGCDStein(x, (x - y) >> 1);
    }
}
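//Illustrative check only (not called anywhere): with the defaults used in this demo,
//GetGCDStein(1920, 854) = 2 because 854 = 2*7*61 and 1920 = 2^7*3*5, so the scaler
//below divides both widths and heights by base = 2.
static void gcd_example(void)
{
    int base = GetGCDStein(1920, 854); /* camera width, panel width */
    printf("base = %d\n", base);       /* prints: base = 2 */
}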
//Convert NV21 to RGB32 while scaling down (software reference; main() uses the FFmpeg path instead)
// input:    NV21 data, inwidth*inheight*3/2 bytes
// output:   RGB32 data; as written, the routine indexes the output by the *input* pixel
//           index (yIndex), so the destination must hold at least inwidth*inheight*4 bytes
// inwidth:  input image width
// inheight: input image height
// outwidth: output image width
// outheight: output image height
int nv21_to_rgb32_screen(unsigned char* input, int inwidth, int inheight, unsigned char* output, int outwidth, int outheight) {
    if (inwidth < 1 || inheight < 1 || input == NULL || output == NULL){ return 0; }
        
    // bytes per output pixel (RGB32)
    int depth = 4;
    int nvOff = inwidth * inheight;
    int i, j, yIndex = 0;
    int y, u, v;
    int r, g, b, nvIndex = 0;
    unsigned char* yuvData = input;
    unsigned char* rgbData = output;

    //Greatest common divisor of the input and output widths
    //if((inwidth/inheight)!=(outwidth/outheight)){return -1;}//aspect ratios should match
    if(inwidth<outwidth){return -2;}
    int base=GetGCDStein(inwidth,outwidth);

    int basehinj=0;
    int baseha=inheight/base;
    int basehb=outheight/base;
    int basehc=baseha-basehb;
    int basehd=baseha/basehb;//integer part of the row ratio
    int basehe=baseha%basehb;
    int basehf1=basehd+1;//normal group size
    int basehf2=basehf1+1;//group size that absorbs the remainder
    int basehf1max=(basehd+1+1)*basehe;
    int basehf2max=baseha;

    int basewinj=0;
    int basewa=inwidth/base;
    int basewb=outwidth/base;
    int basewc=basewa-basewb;
    int basewd=basewa/basewb;//integer part of the column ratio
    int basewe=basewa%basewb;
    int basewf1=basewd+1;//normal group size
    int basewf2=basewf1+1;//group size that absorbs the remainder
    int basewf1max=(basewd+1+1)*basewe;
    int basewf2max=basewa;
    for (i = 0; i < inheight; i++) {
        //Scaling: decide whether to keep or skip this input row
        if(basehinj>=basehf2max){basehinj=0;}
        if(basehinj==0){basehinj+=1;}
        else{
            if(basehinj<=basehf1max){
                if(basehinj%basehf2==0){
                    basehinj+=1;
                }
                else{
                    basehinj+=1;
                    yIndex+=inwidth;
                    continue;
                }
            }
            else{
                if(basehinj%basehf1==0){
                    basehinj+=1;
                }
                else{
                    basehinj+=1;
                    yIndex+=inwidth;
                    continue;
                }
            }
        }

        for (j = 0; j < inwidth; j++, ++yIndex) {
            //Scaling: decide whether to keep or skip this input column
            if(basewinj>=basewf2max){basewinj=0;}
            if(basewinj==0){basewinj+=1;}
            else{
                if(basewinj<=basewf1max){
                    if(basewinj%basewf2==0){
                        basewinj+=1;
                    }
                    else{
                        basewinj+=1;
                        continue;
                    }
                }
                else{
                    if(basewinj%basewf1==0){
                        basewinj+=1;
                    }
                    else{
                        basewinj+=1;
                        continue;
                    }
                }
            }
            
            nvIndex = (i / 2) * inwidth + j - j % 2;
            y = yuvData[yIndex] & 0xff;
            u = yuvData[nvOff + nvIndex] & 0xff;     //NV21 chroma order is V,U; swap u and v if colors look wrong
            v = yuvData[nvOff + nvIndex + 1] & 0xff;
 
            // yuv to rgb
            r = y + ((351 * (v - 128)) >> 8);  //r
            g = y - ((179 * (v - 128) + 86 * (u - 128)) >> 8); //g
            b = y + ((443 * (u - 128)) >> 8); //b
 
            r = ((r > 255) ? 255 : (r < 0) ? 0 : r);
            g = ((g > 255) ? 255 : (g < 0) ? 0 : g);
            b = ((b > 255) ? 255 : (b < 0) ? 0 : b);
            //Note: many framebuffers expect B,G,R(,A) byte order rather than the literal R,G,B;
            //the code below writes R,G,B,0xff, so swap r and b here if the target wants BGRA.
            *(rgbData + yIndex * depth + 0) = r;
            *(rgbData + yIndex * depth + 1) = g;
            *(rgbData + yIndex * depth + 2) = b;
            *(rgbData + yIndex * depth + 3) = 0xff;
        }
    }
    return 1;
}



//FFmpeg-based conversion (libswscale)
static AVFrame * Input_pFrame = NULL;
static AVFrame * Output_pFrame = NULL;
struct SwsContext * img_convert_ctx = NULL; //swscale context for the pixel-format conversion
void ffmpeg_trans_init(int inwidth, int inheight, int outwidth, int outheight){
    Output_pFrame = av_frame_alloc(); //frame wrapper for the RGB output
    Input_pFrame = av_frame_alloc();  //frame wrapper for the NV21 input
    img_convert_ctx = sws_getContext(inwidth, inheight, AV_PIX_FMT_NV21, outwidth, outheight, AV_PIX_FMT_RGB24, SWS_FAST_BILINEAR, NULL, NULL, NULL);
    if (img_convert_ctx == NULL || Input_pFrame == NULL || Output_pFrame == NULL)
        printf("ffmpeg_trans_init failed\n"), exit(-1);
}
//Convert one NV21 frame to RGB24
static void ffmpeg_trans_NV21_TO_RGB888(unsigned char * yuv, int inwidth, int inheight, unsigned char * rgb888, int outwidth, int outheight) {
    // AVFrame * Input_pFrame = NULL;
    // AVFrame * Output_pFrame = NULL;
    // struct SwsContext * img_convert_ctx = NULL;
    /*1. allocating the frame wrappers and 2. creating the SwsContext are done once in ffmpeg_trans_init()*/
    //Output_pFrame = av_frame_alloc();
    //Input_pFrame = av_frame_alloc();
    //img_convert_ctx = sws_getContext( inwidth, inheight, AV_PIX_FMT_NV21, outwidth, outheight, AV_PIX_FMT_RGB24, SWS_FAST_BILINEAR, NULL, NULL, NULL);
    /*3. point the frame wrappers at the caller's buffers*/
    av_image_fill_arrays(Input_pFrame->data, Input_pFrame->linesize, yuv, AV_PIX_FMT_NV21, inwidth, inheight, 1);
    av_image_fill_arrays(Output_pFrame->data, Output_pFrame->linesize, rgb888, AV_PIX_FMT_RGB24, outwidth, outheight, 1);

    /*4. convert*/
    sws_scale(img_convert_ctx, (const uint8_t * const *)Input_pFrame->data, Input_pFrame->linesize, 0, inheight, Output_pFrame->data, Output_pFrame->linesize);
    /*5. releasing the resources is done once in ffmpeg_trans_release()*/
    // if (Input_pFrame) av_free(Input_pFrame);
    // if (Output_pFrame) av_free(Output_pFrame);
    // if (img_convert_ctx) sws_freeContext(img_convert_ctx);
}
void ffmpeg_trans_release(void){
    //free the conversion resources
    if (Input_pFrame) av_frame_free(&Input_pFrame);
    if (Output_pFrame) av_frame_free(&Output_pFrame);
    if (img_convert_ctx) sws_freeContext(img_convert_ctx);
}
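//Illustrative usage of the three helpers above (not called by the demo itself; assumes
//framebuffer_init() has already been called and that an RGB24 destination of
//outwidth*outheight*3 bytes is enough):
static void ffmpeg_trans_example(unsigned char *one_nv21_frame)
{
    unsigned char *rgb = malloc(width * height * 3);
    if (rgb == NULL) return;

    ffmpeg_trans_init(cameraWidth, cameraHeight, width, height);  /* once, at startup */
    ffmpeg_trans_NV21_TO_RGB888(one_nv21_frame, cameraWidth, cameraHeight,
                                rgb, width, height);               /* per captured frame */
    framebuffer_write(rgb, width, height, 24);                     /* blit to the screen */
    ffmpeg_trans_release();                                        /* once, at shutdown */
    free(rgb);
}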

//Test helpers. The top byte of the 32-bit color is the alpha channel; a pixel only shows when it is 0xff.
#define RGB888_R 0x00ff0000
#define RGB888_G 0x0000ff00
#define RGB888_B 0x000000ff

void lcd_put_pixel (int x, int y,unsigned int color888,int line_width, int pixel_width,unsigned char* fb_based)
{
    unsigned char *pen_8 = fb_based + y*line_width + x*pixel_width;
    unsigned short *pen_16;
    unsigned int *pen_32;

    pen_16 = (unsigned short *)pen_8;
    pen_32 = (unsigned int *)pen_8;

    switch(pixel_width*8)
    {
        case 8:
        {
            *pen_8 = color888;
            break;
        }
        case 16:
        {
            unsigned char R = (color888 & RGB888_R)>>19;
            unsigned char G = (color888 & RGB888_G)>>10;
            unsigned char B = (color888 & RGB888_B)>>3;

            unsigned short color565= (R<<11)+(G<<5)+(B<<0);
            
            *pen_16 = color565;
            break;
        }
        case 32:
        {
            *pen_32 = color888;
            break;
        }
    }
    
}
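//Illustrative check of the RGB888 -> RGB565 packing used in the 16 bpp branch above:
//pure red 0x00ff0000 keeps its top 5 bits (0x1f) and lands in bits 15..11, i.e. 0xf800.
static unsigned short rgb888_to_565_example(unsigned int color888)
{
    unsigned char R = (color888 & RGB888_R) >> 19; /* 8-bit red   -> 5 bits */
    unsigned char G = (color888 & RGB888_G) >> 10; /* 8-bit green -> 6 bits */
    unsigned char B = (color888 & RGB888_B) >> 3;  /* 8-bit blue  -> 5 bits */
    /* rgb888_to_565_example(0x00ff0000) == 0xf800,
       rgb888_to_565_example(0x0000ff00) == 0x07e0,
       rgb888_to_565_example(0x000000ff) == 0x001f */
    return (unsigned short)((R << 11) | (G << 5) | B);
}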
void testscreen(void){

    struct fb_var_screeninfo var;
    int screen_size;
    unsigned char* fb_based;
    unsigned int pixel_width;
    unsigned int line_width;
    int fb_open = open("/dev/fb0", O_RDWR);
    if(fb_open < 0)
    {
        printf("can't open /dev/fb0\n");
        return;
    }

    if(ioctl(fb_open, FBIOGET_VSCREENINFO, &var))
    {
        printf("can't get var!\n");
        return;
    }

    
    pixel_width = var.bits_per_pixel/8;
    line_width = var.xres*pixel_width;
    screen_size = var.xres*var.yres*pixel_width;
    fb_based = (unsigned char*)mmap(NULL, screen_size, PROT_READ|PROT_WRITE,MAP_SHARED,fb_open,0);

    printf("screen:%d * %d,%d\n",var.xres,var.yres,screen_size);

    if(fb_based == (unsigned char*)-1)
    {
        printf("can't mmap!\n");
        return;
    }

    memset(fb_based, 0xff, screen_size);

    int offset=0;
    for(int i =0; i <100; i++){
        for(int j =0; j <100; j++){ lcd_put_pixel(var.xres/2+j+offset, var.yres/2+i,0xff0000ff,line_width,pixel_width,fb_based); }// blue
    }
    offset=250;
    for(int i =0; i <100; i++){
        for(int j =0; j <100; j++){ lcd_put_pixel(var.xres/2+j+offset, var.yres/2+i,0xffff0000,line_width,pixel_width,fb_based); }// red
    }
    offset=500;
    for(int i =0; i <100; i++){
        for(int j =0; j <100; j++){ lcd_put_pixel(var.xres/2+j+offset, var.yres/2+i,0xff00ff00,line_width,pixel_width,fb_based); }// green
    }
    munmap(fb_based, screen_size);
    close(fb_open);
}


pthread_t pthid = 0;
static void terminate(int sig_no)
{
    int i = 0;
    printf("Got signal %d, exiting ...\n", sig_no);

    if(pthid){
        pthread_cancel(pthid);
        pthread_join(pthid, NULL);
    }
    framebuffer_close();
    ffmpeg_trans_release();
    usleep(20*1000);
    exit(1);
}

static void install_sig_handler(void)
{
    signal(SIGBUS, terminate);
    signal(SIGFPE, terminate);
    signal(SIGHUP, terminate);
    signal(SIGILL, terminate);
    signal(SIGKILL, terminate); /* SIGKILL cannot be caught; this call has no effect */
    signal(SIGINT, terminate);
    signal(SIGIOT, terminate);
    signal(SIGPIPE, terminate);
    signal(SIGQUIT, terminate);
    signal(SIGSEGV, terminate);
    signal(SIGSYS, terminate);
    signal(SIGTERM, terminate);
    signal(SIGTRAP, terminate);
    signal(SIGUSR1, terminate);
    signal(SIGUSR2, terminate);
}

int main()
{
    install_sig_handler();
    //testscreen(); while (1) { sleep(1); }

    
    /*Set up the FFmpeg converter, then create the camera capture thread*/
    ffmpeg_trans_init(cameraWidth,cameraHeight,width,height);
    mydebugUs;
    
    pthread_create(&pthid,NULL,Video_CollectImage_normal_V4L2_MEMORY_USERPTR, NULL);
    /*Do not detach: terminate() cancels and joins this thread on exit*/
    while (video_flag==1) { usleep(1); } /*wait until the capture thread has started streaming*/
    mydebugUs;


    printf("image:%d * %d,%d\n",width,height,size);
    unsigned char *rgb_data32=malloc(width*height*10); /*RGB24 output needs width*height*3 bytes; allocated with headroom*/
    framebuffer_init();//while (1) { mydebug; sleep(1); }
    bool quit=true;
    unsigned int count=0;
    while(quit)
    {
        if(!video_flag)
        {
            quit=false;
            continue;
        }
        pthread_mutex_lock(&fastmutex);   // lock
        pthread_cond_wait(&cond,&fastmutex); // wait for the capture thread to publish a frame (no predicate; a spurious wakeup just redraws)
        //nv21_to_rgb32_screen(raw_buff,cameraWidth,cameraHeight,rgb_data32,width,height);
        ffmpeg_trans_NV21_TO_RGB888(raw_buff,cameraWidth,cameraHeight,rgb_data32,width,height);

        pthread_mutex_unlock(&fastmutex); // unlock; raw_buff is no longer needed after the conversion
        framebuffer_write(rgb_data32,width,height,24);
        //memcpy(framebuffer,rgb_data32,width*height*4);
        if(count%100==0) printf("framebuffer_write count:%u\n",count);
        count++;
    }

    pthread_mutex_destroy(&fastmutex);/*destroy the mutex*/
    pthread_cond_destroy(&cond);/*destroy the condition variable*/
    free(raw_buff);
    free(rgb_data32);
    return 0;
}

 

Makefile

#Makefile
# mount -t nfs -o nolock,vers=3 192.168.5.12:/home/book/nfs_rootfs /mnt
# ps -ef | grep jc | grep -v grep | awk '{print $1}' | xargs kill  && fbinit 0 && cd root
# fbinit 0
# cp /mnt/jc_03_camera_ffmpeg /root/ && cd /root && ./jc_03_camera_ffmpeg

CC=aarch64-linux-gnu-gcc
CFLAGS= -g -Wall -O2 -I. -I/root/pro/ffmpeg_build/ffmpeg -I/opt/EmbedSky/TQT507/CoreA/longan//platform/framework/auto/sdk_lib/include

LDFLAGS += -L/opt/EmbedSky/TQT507/CoreA/longan/out/t507/evb/longan/buildroot/target/usr/lib 
LDFLAGS += -lm -lz -lpthread -fstack-usage -lfreetype -lpng -lbz2 -lavformat -lavcodec -lavutil -lswscale -ltinyalsa -lswresample -lavdevice  
LDFLAGS += -L/platform/framework/auto/sdk_lib/cedarx/lib -lcdx_ion
LDFLAGS += -L/platform/framework/auto/sdk_lib/sdk_memory -lsdk_memory


TARGET= jc_03_camera_ffmpeg

OUTPUT = /home/book/nfs_rootfs
OUTPUT_BOARD = /opt/EmbedSky/TQT507/CoreA/longan/out/t507/evb/longan/buildroot/target/root



#Collect the files to compile
MAINSRC = linux_ffmpeg_v4l2.c
CSRCS = 


# CSRCS +=$(LVGL_DIR)/mouse_cursor_icon.c 
# $(info  __debuginfo__ $(CSRCS))

OBJEXT ?= .o

AOBJS = $(ASRCS:.S=$(OBJEXT))
COBJS = $(CSRCS:.c=$(OBJEXT))

MAINOBJ = $(MAINSRC:.c=$(OBJEXT))

SRCS = $(ASRCS) $(CSRCS) $(MAINSRC)
OBJS = $(AOBJS) $(COBJS)



all: default

%.o: %.c
    $(CC) $(CFLAGS) -c $< -o $@
    @echo "CC $<"
    
default: $(AOBJS) $(COBJS) $(MAINOBJ)
    $(CC) -o $(TARGET) $(MAINOBJ) $(AOBJS) $(COBJS) $(CFLAGS) $(LDFLAGS)
    @echo finished $(TARGET)
    cp $(TARGET) $(OUTPUT) 
    cp $(TARGET) $(OUTPUT_BOARD) 


.PHONY:clean
clean:
    rm -rf *.o $(TARGET) $(CXX_OBJS) $(C_OBJS) out.*
    

 

Email: 2471563510@qq.com

 
