Camera Application Programming (4): Real-Time Preview of a UVC Camera on an LCD under ARM Linux


Table of Contents

  • 1. Preface
  • 2. Environment
  • 3. Steps
  • 4. Writing the Application
    • 4.1 LCD Initialization
    • 4.2 Camera Initialization
    • 4.3 JPEG Decoding
    • 4.4 Starting the Camera
    • 4.5 Complete Program
  • 5. Testing
    • 5.1 Compiling the Application
    • 5.2 Running the Application
  • 6. Summary

1. Preface

This application targets UVC cameras that can output frames in MJPEG format.
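
If the v4l2-utils package is available on the board, you can confirm in advance that the camera really offers an MJPEG format (the device node below is only an example; substitute your own):

v4l2-ctl -d /dev/video10 --list-formats-ext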

2. Environment

RK3566 + 7-inch MIPI LCD + UVC camera

3. Steps

Writing the application breaks down into the following steps:

1. LCD initialization.

2. Camera initialization.

3. Capturing frames from the camera.

4. JPEG decoding.

5. Displaying the frames on the LCD.

4. Writing the Application

4.1 LCD Initialization

typedef struct lcd_mes {
    int fd;
    unsigned char *fb_base;
    int lcd_width;
    int lcd_height;
    unsigned int bpp;
    unsigned int line_width;
} lcd_mes;

int lcd_init(const char *fb_dev, lcd_mes *lcd)
{
    int screen_size;
    struct fb_var_screeninfo var;   

    if (fb_dev == NULL)
        goto _err;

    /* 1. open /dev/fb* */    
    lcd->fd = open(fb_dev, O_RDWR);
    if(lcd->fd < 0)
    {
        printf("can not open %s\n", fb_dev);
        goto _err;
    }

    /* 2. get lcd message */
    if (ioctl(lcd->fd, FBIOGET_VSCREENINFO, &var))
    {
        printf("can not get var\n");
        goto _err;
    }

    screen_size = var.xres * var.yres * var.bits_per_pixel / 8;
    lcd->line_width  = var.xres * var.bits_per_pixel / 8;
    lcd->lcd_width = var.xres;
    lcd->lcd_height = var.yres;
    lcd->bpp = var.bits_per_pixel;
    lcd->fb_base = mmap(NULL, screen_size, PROT_READ | PROT_WRITE, MAP_SHARED, lcd->fd, 0);
    if (lcd->fb_base == MAP_FAILED)
    {
        printf("can not mmap\n");
        goto _err;
    }

    memset(lcd->fb_base, 0x00, screen_size);
    return 0;

_err:
    return -1;
}
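
jpeg_show_on_lcd() in section 4.3 writes one unsigned int (4 bytes) per pixel, so this code path assumes a 32-bpp framebuffer. A minimal sketch of a guard you could add right after lcd_init() returns (the framebuffer path is only an example):

    lcd_mes lcd;

    if (lcd_init("/dev/fb0", &lcd) == -1)
        return -1;

    /* the display routine packs pixels as 32-bit XRGB, so reject other depths */
    if (lcd.bpp != 32)
    {
        printf("unsupported bpp: %d (expected 32)\n", lcd.bpp);
        return -1;
    }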

4.2 Camera Initialization

Initialize the UVC camera through the V4L2 interface:

typedef struct camera_mes {
    int fd;
    void *bufs[32];
    int bufs_index;
    int buf_length;
    char fmt[20];
    int frame_x_size;
    int frame_y_size;
} camera_mes;

int camera_init(const char *video, camera_mes *camera)
{   
    struct v4l2_fmtdesc fmtdesc;
    struct v4l2_frmsizeenum fsenum;
    int fmt_index = 0;
    int frame_index = 0;
    int buf_cnt;
    int i;

    if (video == NULL)
        goto _err;

    /* 1. open /dev/video* */
    camera->fd = open(video, O_RDWR);
    if (camera->fd < 0)
    {
        printf("can not open %s\n", video);
        goto _err;
    }

    /* 2. query capability */
    struct v4l2_capability cap;
    memset(&cap, 0, sizeof(struct v4l2_capability));

    if (0 == ioctl(camera->fd, VIDIOC_QUERYCAP, &cap))
    {        
        if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) 
        {
            fprintf(stderr, "Error opening device %s: video capture not supported.\n", video);
            goto _ioc_querycap_err;
        }

        if(!(cap.capabilities & V4L2_CAP_STREAMING)) 
        {
            fprintf(stderr, "%s does not support streaming i/o\n", video);
            goto _ioc_querycap_err;
        }
    }
    else
    {
        printf("can not get capability\n");
        goto _ioc_querycap_err;
    }

    /* 3. enumerate formats */
    while (1)
    {
        fmtdesc.index = fmt_index;  
        fmtdesc.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;  
        if (0 != ioctl(camera->fd, VIDIOC_ENUM_FMT, &fmtdesc))
            break;

        frame_index = 0;
        // printf("format %s,%d:\n", fmtdesc.description, fmtdesc.pixelformat);
        while (1)
        {
            memset(&fsenum, 0, sizeof(struct v4l2_frmsizeenum));
            fsenum.pixel_format = fmtdesc.pixelformat;
            fsenum.index = frame_index;

            /* get framesize */
            if (ioctl(camera->fd, VIDIOC_ENUM_FRAMESIZES, &fsenum) == 0)
            {
                // printf("\t%d: %d x %d\n", frame_index, fsenum.discrete.width, fsenum.discrete.height);
            }
            else
            {
                break;
            }

            frame_index++;
        }

        fmt_index++;
    }

    /* 4. set format */
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(struct v4l2_format));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = camera->frame_x_size;
    fmt.fmt.pix.height = camera->frame_y_size;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
    fmt.fmt.pix.field = V4L2_FIELD_ANY;
    if (0 == ioctl(camera->fd, VIDIOC_S_FMT, &fmt))
    {
        // printf("the final frame-size has been set : %d x %d\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
        camera->frame_x_size = fmt.fmt.pix.width;
        camera->frame_y_size = fmt.fmt.pix.height;
        /* copy the format name and make sure it is NUL-terminated */
        strncpy(camera->fmt, "Motion-JPEG", sizeof(camera->fmt) - 1);
        camera->fmt[sizeof(camera->fmt) - 1] = '\0';
    }
    else
    {
        printf("can not set format\n");
        goto _ioc_sfmt_err;
    }

    /* 5. request buffers */
    struct v4l2_requestbuffers rb;
    memset(&rb, 0, sizeof(struct v4l2_requestbuffers));
    rb.count = 32;
    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rb.memory = V4L2_MEMORY_MMAP;

    if (0 == ioctl(camera->fd, VIDIOC_REQBUFS, &rb))
    {
        buf_cnt = rb.count;
        for(i = 0; i < rb.count; i++) 
        {
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof(struct v4l2_buffer));
            buf.index = i;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            if (0 == ioctl(camera->fd, VIDIOC_QUERYBUF, &buf))
            {
                /* mmap */
                camera->bufs[i] = mmap(0, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, camera->fd, buf.m.offset);
                if(camera->bufs[i] == MAP_FAILED) 
                {
                    printf("Unable to map buffer");
                    goto _err;
                }
            }
            else
            {
                printf("can not query buffer\n");
                goto _err;
            }            
        }
    }
    else
    {
        printf("can not request buffers\n");
        goto _ioc_reqbufs_err;
    }

    /* 6. queue buffer */
    for(i = 0; i < buf_cnt; ++i) 
    {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.index = i;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (0 != ioctl(camera->fd, VIDIOC_QBUF, &buf))
        {
            perror("Unable to queue buffer");
            goto _ioc_qbuf_err;
        }
    }

    camera->bufs_index = 0;     // init camera struct
    camera->buf_length = 0;
    return 0;

_ioc_qbuf_err:
_ioc_reqbufs_err:
_ioc_sfmt_err:
_ioc_querycap_err:
_err:
    return -1;
}
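
Note that camera_init() expects frame_x_size and frame_y_size to be filled in before the call, and VIDIOC_S_FMT may adjust the requested size to the closest one the camera supports, so always use the values written back. A minimal usage sketch (device node and resolution are only examples):

    camera_mes camera;

    camera.frame_x_size = 1024;     /* requested size; the driver may adjust it */
    camera.frame_y_size = 600;
    if (camera_init("/dev/video10", &camera) == -1)
        return -1;

    printf("negotiated frame size: %d x %d\n", camera.frame_x_size, camera.frame_y_size);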

4.3 JPEG Decoding

int jpeg_show_on_lcd(lcd_mes *lcd, camera_mes *camera)
{
    int min_width, min_height;
    int valid_bytes;
    int offset_x, offset_y;

	struct jpeg_decompress_struct cinfo;
	struct jpeg_error_mgr jerr;
    
	cinfo.err = jpeg_std_error(&jerr);      // bind the error handler to the decompress object
	jpeg_create_decompress(&cinfo);         // initialize the decoder
	jpeg_mem_src(&cinfo, camera->bufs[camera->bufs_index], camera->buf_length);   // take the JPEG data from the dequeued camera buffer

	jpeg_read_header(&cinfo, TRUE);         // read the image header
	cinfo.out_color_space = JCS_RGB;        // decode to the RGB color space

	jpeg_start_decompress(&cinfo);          // start decompression

	unsigned char *jpeg_line_buf = (unsigned char *)malloc(cinfo.output_components * cinfo.output_width);   // holds one decoded scanline
	unsigned int *fb_line_buf = (unsigned int *)malloc(lcd->line_width);                                    // holds one converted line ready for the framebuffer

    min_width = (cinfo.output_width < lcd->lcd_width) ? cinfo.output_width : lcd->lcd_width;
    min_height = (cinfo.output_height < lcd->lcd_height) ? cinfo.output_height : lcd->lcd_height;

	valid_bytes = min_width * lcd->bpp / 8;             // number of valid bytes per output line
	unsigned char *ptr = lcd->fb_base;

    offset_x = ((lcd->lcd_width - min_width) / 2) * lcd->bpp / 8;   // center horizontally (byte offset)
    offset_y = (lcd->lcd_height - min_height) / 2;      // center vertically (line offset)
    for (int i = 0; i < offset_y; i++)
        ptr += lcd->lcd_width * lcd->bpp / 8;
    
    unsigned int red, green, blue;
    unsigned int color;
	while (cinfo.output_scanline < min_height)
	{
		jpeg_read_scanlines(&cinfo, &jpeg_line_buf, 1); // read one scanline at a time

		for(int i = 0; i < min_width; i++)
		{
			red = jpeg_line_buf[i * 3];
			green = jpeg_line_buf[i * 3 + 1];
			blue = jpeg_line_buf[i * 3 + 2];
			color = red << 16 | green << 8 | blue;      // pack RGB888 into 32-bit XRGB8888

			fb_line_buf[i] = color;
		}

		memcpy(ptr + offset_x, fb_line_buf, valid_bytes);   // copy one line into the framebuffer
		ptr += lcd->lcd_width * lcd->bpp / 8;               // advance to the next framebuffer line
	}

	jpeg_finish_decompress(&cinfo);                     // finish decompression
	jpeg_destroy_decompress(&cinfo);                    // destroy the decompress object
	free(jpeg_line_buf);                                // release the line buffers
	free(fb_line_buf);

    return 0;
}
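
One caveat: if the decoded frame is taller than the LCD, the loop above stops before every scanline has been read, and with the default libjpeg error handler jpeg_finish_decompress() may then abort with a "too few scanlines" error. A minimal sketch of a fix, inserted just before jpeg_finish_decompress(), is to drain the remaining lines:

	/* discard any scanlines that do not fit on the LCD before finishing */
	while (cinfo.output_scanline < cinfo.output_height)
		jpeg_read_scanlines(&cinfo, &jpeg_line_buf, 1);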

4.4 Starting the Camera

int main(int argc, char **argv)
{

    ...

    /* start camera */
    if (0 != ioctl(camera.fd, VIDIOC_STREAMON, &type))
    {
        printf("Unable to start capture\n");
        goto _err;
    }

    printf("\nstart camera ...\n");
    while (1)
    {
        /* poll */
        memset(fds, 0, sizeof(fds));
        fds[0].fd = camera.fd;
        fds[0].events = POLLIN;
        if (1 == poll(fds, 1, -1))
        {
            /* dequeue buffer */
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof(struct v4l2_buffer));
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            
            if (0 != ioctl(camera.fd, VIDIOC_DQBUF, &buf))
            {
                printf("Unable to dequeue buffer\n");
                goto _ioc_dqbuf_err;
            }
            
            /* decode the JPEG frame and show it on the LCD */
            camera.bufs_index = buf.index;
            camera.buf_length = buf.bytesused;      /* bytesused is the real size of the compressed frame */
            jpeg_show_on_lcd(&lcd, &camera);

            /* queue buffer */
            if (0 != ioctl(camera.fd, VIDIOC_QBUF, &buf))
            {
                printf("Unable to queue buffer");
                goto _ioc_qbuf_err;
            }
        }
    }

    ...
    
}

4.5 Complete Program


#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <linux/types.h>          
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/mman.h>
#include <jpeglib.h>
#include <linux/fb.h>

typedef struct lcd_mes {
    int fd;
    unsigned char *fb_base;
    int lcd_width;
    int lcd_height;
    unsigned int bpp;
    unsigned int line_width;
} lcd_mes;

typedef struct camera_mes {
    int fd;
    void *bufs[32];
    int bufs_index;
    int buf_length;
    char fmt[20];
    int frame_x_size;
    int frame_y_size;
} camera_mes;

int jpeg_show_on_lcd(lcd_mes *lcd, camera_mes *camera)
{
    int min_width, min_height;
    int valid_bytes;
    int offset_x, offset_y;

	struct jpeg_decompress_struct cinfo;
	struct jpeg_error_mgr jerr;
    
	cinfo.err = jpeg_std_error(&jerr);      // bind the error handler to the decompress object
	jpeg_create_decompress(&cinfo);         // initialize the decoder
	jpeg_mem_src(&cinfo, camera->bufs[camera->bufs_index], camera->buf_length);   // take the JPEG data from the dequeued camera buffer

	jpeg_read_header(&cinfo, TRUE);         // read the image header
	cinfo.out_color_space = JCS_RGB;        // decode to the RGB color space

	jpeg_start_decompress(&cinfo);          // start decompression

	unsigned char *jpeg_line_buf = (unsigned char *)malloc(cinfo.output_components * cinfo.output_width);   // holds one decoded scanline
	unsigned int *fb_line_buf = (unsigned int *)malloc(lcd->line_width);                                    // holds one converted line ready for the framebuffer

    min_width = (cinfo.output_width < lcd->lcd_width) ? cinfo.output_width : lcd->lcd_width;
    min_height = (cinfo.output_height < lcd->lcd_height) ? cinfo.output_height : lcd->lcd_height;

	valid_bytes = min_width * lcd->bpp / 8;             // number of valid bytes per output line
	unsigned char *ptr = lcd->fb_base;

    offset_x = ((lcd->lcd_width - min_width) / 2) * lcd->bpp / 8;   // center horizontally (byte offset)
    offset_y = (lcd->lcd_height - min_height) / 2;      // center vertically (line offset)
    for (int i = 0; i < offset_y; i++)
        ptr += lcd->lcd_width * lcd->bpp / 8;
    
    unsigned int red, green, blue;
    unsigned int color;
	while (cinfo.output_scanline < min_height)
	{
		jpeg_read_scanlines(&cinfo, &jpeg_line_buf, 1); // read one scanline at a time

		for(int i = 0; i < min_width; i++)
		{
			red = jpeg_line_buf[i * 3];
			green = jpeg_line_buf[i * 3 + 1];
			blue = jpeg_line_buf[i * 3 + 2];
			color = red << 16 | green << 8 | blue;      // pack RGB888 into 32-bit XRGB8888

			fb_line_buf[i] = color;
		}

		memcpy(ptr + offset_x, fb_line_buf, valid_bytes);   // copy one line into the framebuffer
		ptr += lcd->lcd_width * lcd->bpp / 8;               // advance to the next framebuffer line
	}

	jpeg_finish_decompress(&cinfo);                     // finish decompression
	jpeg_destroy_decompress(&cinfo);                    // destroy the decompress object
	free(jpeg_line_buf);                                // release the line buffers
	free(fb_line_buf);

    return 0;
}

int lcd_init(const char *fb_dev, lcd_mes *lcd)
{
    
    int screen_size;
    struct fb_var_screeninfo var;   

    if (fb_dev == NULL)
        goto _err;

    /* 1. open /dev/fb* */    
	lcd->fd = open(fb_dev, O_RDWR);
	if(lcd->fd < 0)
	{
        printf("can not open %s\n", fb_dev);
        goto _err;
    }

	/* 2. get lcd message */
	if (ioctl(lcd->fd, FBIOGET_VSCREENINFO, &var))
	{
		printf("can not get var\n");
		goto _err;
	}

    screen_size = var.xres * var.yres * var.bits_per_pixel / 8;
    lcd->line_width  = var.xres * var.bits_per_pixel / 8;
	lcd->lcd_width = var.xres;
	lcd->lcd_height = var.yres;
	lcd->bpp = var.bits_per_pixel;
	lcd->fb_base = mmap(NULL, screen_size, PROT_READ | PROT_WRITE, MAP_SHARED, lcd->fd, 0);
	if (lcd->fb_base == MAP_FAILED)
    {
        printf("can not mmap\n");
        goto _err;
    }

    memset(lcd->fb_base, 0x00, screen_size);
    return 0;

_err:
    return -1;
}

int camera_init(const char *video, camera_mes *camera)
{   
    struct v4l2_fmtdesc fmtdesc;
    struct v4l2_frmsizeenum fsenum;
    int fmt_index = 0;
    int frame_index = 0;
    int buf_cnt;
    int i;

    if (video == NULL)
        goto _err;

    /* 1. open /dev/video* */
    camera->fd = open(video, O_RDWR);
    if (camera->fd < 0)
    {
        printf("can not open %s\n", video);
        goto _err;
    }

    /* 2. query capability */
    struct v4l2_capability cap;
    memset(&cap, 0, sizeof(struct v4l2_capability));

    if (0 == ioctl(camera->fd, VIDIOC_QUERYCAP, &cap))
    {        
        if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) 
        {
            fprintf(stderr, "Error opening device %s: video capture not supported.\n", video);
            goto _ioc_querycap_err;
        }

        if(!(cap.capabilities & V4L2_CAP_STREAMING)) 
        {
            fprintf(stderr, "%s does not support streaming i/o\n", video);
            goto _ioc_querycap_err;
        }
    }
    else
    {
        printf("can not get capability\n");
        goto _ioc_querycap_err;
    }

    /* 3. enumerate formats */
    while (1)
    {
        fmtdesc.index = fmt_index;  
        fmtdesc.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;  
        if (0 != ioctl(camera->fd, VIDIOC_ENUM_FMT, &fmtdesc))
            break;

        frame_index = 0;
        // printf("format %s,%d:\n", fmtdesc.description, fmtdesc.pixelformat);
        while (1)
        {
            memset(&fsenum, 0, sizeof(struct v4l2_frmsizeenum));
            fsenum.pixel_format = fmtdesc.pixelformat;
            fsenum.index = frame_index;

            /* get framesize */
            if (ioctl(camera->fd, VIDIOC_ENUM_FRAMESIZES, &fsenum) == 0)
            {
                // printf("\t%d: %d x %d\n", frame_index, fsenum.discrete.width, fsenum.discrete.height);
            }
            else
            {
                break;
            }

            frame_index++;
        }

        fmt_index++;
    }

    /* 4. set format */
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(struct v4l2_format));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = camera->frame_x_size;
    fmt.fmt.pix.height = camera->frame_y_size;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
    fmt.fmt.pix.field = V4L2_FIELD_ANY;
    if (0 == ioctl(camera->fd, VIDIOC_S_FMT, &fmt))
    {
        // printf("the final frame-size has been set : %d x %d\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
        camera->frame_x_size = fmt.fmt.pix.width;
        camera->frame_y_size = fmt.fmt.pix.height;
        /* copy the format name and make sure it is NUL-terminated */
        strncpy(camera->fmt, "Motion-JPEG", sizeof(camera->fmt) - 1);
        camera->fmt[sizeof(camera->fmt) - 1] = '\0';
    }
    else
    {
        printf("can not set format\n");
        goto _ioc_sfmt_err;
    }

    /* 5. request buffers */
    struct v4l2_requestbuffers rb;
    memset(&rb, 0, sizeof(struct v4l2_requestbuffers));
    rb.count = 32;
    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rb.memory = V4L2_MEMORY_MMAP;

    if (0 == ioctl(camera->fd, VIDIOC_REQBUFS, &rb))
    {
        buf_cnt = rb.count;
        for(i = 0; i < rb.count; i++) 
        {
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof(struct v4l2_buffer));
            buf.index = i;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            if (0 == ioctl(camera->fd, VIDIOC_QUERYBUF, &buf))
            {
                /* mmap */
                camera->bufs[i] = mmap(0, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, camera->fd, buf.m.offset);
                if(camera->bufs[i] == MAP_FAILED) 
                {
                    printf("Unable to map buffer");
                    goto _err;
                }
            }
            else
            {
                printf("can not query buffer\n");
                goto _err;
            }            
        }
    }
    else
    {
        printf("can not request buffers\n");
        goto _ioc_reqbufs_err;
    }

    /* 6. queue buffer */
    for(i = 0; i < buf_cnt; ++i) 
    {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.index = i;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (0 != ioctl(camera->fd, VIDIOC_QBUF, &buf))
        {
            perror("Unable to queue buffer");
            goto _ioc_qbuf_err;
        }
    }

    camera->bufs_index = 0;     // init camera struct
    camera->buf_length = 0;
    return 0;

_ioc_qbuf_err:
_ioc_reqbufs_err:
_ioc_sfmt_err:
_ioc_querycap_err:
_err:
    return -1;
}

int main(int argc, char **argv)
{
    int ret;
    lcd_mes lcd;
    camera_mes camera;
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    struct pollfd fds[1];

    if (argc != 3)
    {
        printf("Usage: %s </dev/videoX> </dev/fbX>\n", argv[0]);
        return -1;
    }

    /* lcd init */
    ret = lcd_init(argv[2], &lcd);
    if (ret == -1)
    {
        printf("lcd init err !\n");
        goto _err;
    }

    printf("\n-------------- lcd message --------------\n");
    printf("screen pixel: %d x %d\n", lcd.lcd_width, lcd.lcd_height);
    printf("line width: %d (byte)\n", lcd.line_width);
	printf("bpp: %d\n", lcd.bpp);
    printf("-----------------------------------------\n");

    /* camera init */
    camera.frame_x_size = lcd.lcd_width;
    camera.frame_y_size = lcd.lcd_height;
    ret = camera_init(argv[1], &camera);
    if (ret == -1)
    {
        printf("camera init err !\n");
        goto _err;
    }

    printf("\n------------ camera message -------------\n");
    printf("frame size: %d x %d\n", camera.frame_x_size, camera.frame_y_size);
    printf("format: %s\n", camera.fmt);
    printf("-----------------------------------------\n");

    /* start camera */
    if (0 != ioctl(camera.fd, VIDIOC_STREAMON, &type))
    {
        printf("Unable to start capture\n");
        goto _err;
    }

    printf("\nstart camera ...\n");
    while (1)
    {
        /* poll */
        memset(fds, 0, sizeof(fds));
        fds[0].fd = camera.fd;
        fds[0].events = POLLIN;
        if (1 == poll(fds, 1, -1))
        {
            /* dequeue buffer */
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof(struct v4l2_buffer));
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            
            if (0 != ioctl(camera.fd, VIDIOC_DQBUF, &buf))
            {
                printf("Unable to dequeue buffer\n");
                goto _ioc_dqbuf_err;
            }
            
            /* decode the JPEG frame and show it on the LCD */
            camera.bufs_index = buf.index;
            camera.buf_length = buf.bytesused;      /* bytesused is the real size of the compressed frame */
            jpeg_show_on_lcd(&lcd, &camera);

            /* queue buffer */
            if (0 != ioctl(camera.fd, VIDIOC_QBUF, &buf))
            {
                printf("Unable to queue buffer");
                goto _ioc_qbuf_err;
            }
        }
    }

    /* close camera */
    if (0 != ioctl(camera.fd, VIDIOC_STREAMOFF, &type))
    {
        printf("Unable to stop capture\n");
        goto _ioc_streamoff_err;
    }
    close(camera.fd);

    return 0;

_ioc_streamoff_err:
_ioc_qbuf_err:
_ioc_dqbuf_err:
_err:
    return -1;
}

5. Testing

5.1 Compiling the Application

If your board runs a buildroot system, you need to cross-compile.

The board used for this test runs Ubuntu, so the program can be compiled directly with:

sudo gcc -o uvctolcd uvctolcd.c -ljpeg
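
For a buildroot system, the same source is cross-compiled with the SDK toolchain instead; a typical invocation looks like the following (the toolchain path and prefix are only examples — use the ones from your own buildroot output):

/path/to/buildroot/output/host/bin/aarch64-buildroot-linux-gnu-gcc -o uvctolcd uvctolcd.c -ljpeg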

5.2 Running the Application

sudo ./uvctolcd /dev/video10 /dev/fb0
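
The first argument is the camera's video node and the second is the framebuffer device. A UVC camera usually registers more than one /dev/video node; if v4l2-utils is installed, v4l2-ctl --list-devices shows which nodes belong to the camera, and only the capture node will work here.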

6. Summary

Reference article: 《Linux摄像头(v4l2应用)——在LCD上实时显示摄像头采集JPEG数据》 (CSDN blog)
Source code Gitee repository:

Repository home page:
https://gitee.com/cattle_l/v4l2_app.git
Clone directly:
git clone https://gitee.com/cattle_l/v4l2_app.git
