Linux下USB设备图像采集

news2025/1/2 4:36:48

操作系统 : Linux
采集方式 : v4l2(video4linux2)
设备目录 :/dev/video0

Linux系统
Linux采集的核心组件名称叫:v4l2即video4linux2的简称。是Linux中关于视频设备的内核驱动,在Linux中,视频设备是设备文件,可以像访问普通文件一样对其进行读写,摄像头设备文件位置是/dev/video0。

查看相机设备

方法一,直接通过ls查看/dev/目录下设备列表

ls /dev/video*

方法二,通过ffmpeg的ffprobe命令来查看连接到系统中的摄像头设备:
FFmpeg命令行安装

 sudo apt-get install ffmpeg
ffprobe /dev/video0

在这里插入图片描述video0设备采集出来的裸帧格式是:yuyv422,帧率30fps,分辨率640x480,因此在保存摄像头数据的时候,先对yuyv422的格式进行变换,将其变换为yuv420p的格式,然后再进行编码保存为h264的文件。

v4l2 常用命令

获取设备列表

v4l2-ctl --list-devices
#List supported video formats and resolutions of default video device 
v4l2-ctl --list-formats-ext
 
#List supported video formats and resolutions of a specific video device:
v4l2-ctl --list-formats-ext --device path/to/video_device
#eg:
v4l2-ctl --list-formats-ext --device /dev/video0
 
#Get all details of a video device:
v4l2-ctl --all --device path/to/video_device
#eg:
v4l2-ctl --all --device /dev/video0
 
#Capture a JPEG photo with a specific resolution from video device:
v4l2-ctl --device path/to/video_device --set-fmt-video=width=width,height=height,pixelformat=MJPG --stream-mmap --stream-to=path/to/output.jpg --stream-count=1
#eg:
v4l2-ctl --device /dev/video0 --set-fmt-video=width=1280,height=720,pixelformat=MJPG --stream-mmap --stream-to=/home/nvidia/Pictures/video0-output.jpg --stream-count=1
 
#Capture a raw video stream from video device:
v4l2-ctl --device path/to/video_device --set-fmt-video=width=width,height=height,pixelformat=format --stream-mmap --stream-to=path/to/output --stream-count=number_of_frames_to_capture
#eg:
v4l2-ctl --device /dev/video0 --set-fmt-video=width=1280,height=720,pixelformat=MJPG --stream-mmap --stream-to=/home/nvidia/Pictures --stream-count=10
 
#List all video device's controls and their values:
v4l2-ctl --list-ctrls --device /path/to/video_device
#eg:
v4l2-ctl --list-ctrls --device /dev/video0

测试相机可用性

$ cheese -d /dev/video0
$ ffplay -f v4l2 -input_format bayer_bggr16le -video_size 640x480 -i /dev/video0

代码

common.h

#ifndef COMMON_H
#define COMMON_H


#ifdef __cplusplus
extern "C" {
#endif
#include <linux/videodev2.h>

/* Globals shared between the C capture code (defined in v4l2.c) and the C++ UI. */
extern struct v4l2_fmtdesc fmtd[20]; /* pixel formats enumerated from the device in c_OpenDevice */
extern unsigned char * displaybuf; //v4l2 video buffer
extern int current_video_state;    /* 1 once streaming started (c_RequestBuffer), 0 otherwise */
/* One mmap'ed V4L2 capture buffer: start address and byte length of the mapping. */
typedef struct buffer{
    void *start;
    unsigned int length;
}buffer;

/* Capture/display geometry (pixels) and the number of V4L2 buffers requested. */
#define SRC_WIDTH 1280
#define SRC_HEIGHT 720
#define DST_WIDTH 1280
#define DST_HEIGHT 720
#define NB_BUFFER 4

#ifdef __cplusplus
}
#endif
#endif

v4l2.h

#ifndef V4L2_H
#define V4L2_H

#include <fcntl.h>
#include <linux/fb.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>
#include <time.h>
#include <unistd.h>
#include <errno.h>
#include "config.h"

#ifdef __cplusplus
extern "C" {
#endif

/* V4L2 capture helper API (implemented in v4l2.c).
   NOTE(review): the `buffer` type used below is declared in common.h;
   presumably "config.h" pulls it in — verify, otherwise this header is
   not self-contained. */
int c_OpenDevice(char* video);   /* open device node; returns fd, or -1 on error */
void c_CloseDevice(int videofd); /* close the fd returned by c_OpenDevice */
int c_FormatDevice(unsigned int pixformat, int videofd); /* set + read back capture format; 0 on success */
int c_RequestBuffer(buffer *vbuffer, int videofd);       /* mmap + queue buffers, stream on; 0 on success */
int c_GetBuffer(unsigned char* yuvBuffer, buffer *vbuffer, int videofd); /* fetch one frame; 0 on success */
void c_DeintDevice(int videofd, buffer *vbuffer);        /* stream off, unmap, free vbuffer */
void c_NV12_TO_RGB24(unsigned char *yuyv, unsigned char *rgb, int width, int height); /* NV12 -> RGB888 */
void c_yuyv_to_rgb(unsigned char *yuyvdata, unsigned char *rgbdata, int w, int h);    /* YUYV -> RGB888 */

/* variables: shared globals are declared in common.h */


#ifdef __cplusplus
}
#endif
#endif

camerathread1.h

#ifndef CAMERATHREAD1_H
#define CAMERATHREAD1_H

#include <QObject>
#include <QThread>
#include <QDebug>

// Worker thread that opens the V4L2 device named by camera1Dev, converts each
// captured YUYV frame to RGB888 and hands it to the GUI via showCamera1().
class CameraThread1 : public QThread
{
    Q_OBJECT
public:
    CameraThread1();

    // QThread entry point; delegates to getCam1Buf().
    void run() override;

    // Capture loop: open the device, stream frames until previewCam1 is cleared.
    void getCam1Buf();

    // Loop flag, cleared from another thread to stop capture.
    // NOTE(review): read and written from two threads with no synchronization —
    // consider an atomic flag; confirm against the Qt threading rules.
    bool previewCam1 = true;

    // Device node to open, e.g. "/dev/video0". Must be set before start().
    QString camera1Dev="";

signals:
    // Emitted once per converted frame; `buffer` points into the worker's
    // internal RGB buffer (no copy is made).
    void showCamera1(unsigned char *buffer);
};

#endif // CAMERATHREAD1_H

mainwindow.h

#ifndef MAINWINDOW_H
#define MAINWINDOW_H

#include <QMainWindow>
#include <QLabel>
#include "common.h"
#include "camerathread1.h"

QT_BEGIN_NAMESPACE
namespace Ui { class MainWindow; }
QT_END_NAMESPACE

// Main window: starts the camera capture thread and paints the RGB frames it
// emits onto the `camera1` label.
class MainWindow : public QMainWindow
{
    Q_OBJECT

public:
    MainWindow(QWidget *parent = nullptr);
    ~MainWindow();
    void checkIspServer();

private:
    Ui::MainWindow *ui;

    // QLabel *camera1;
    CameraThread1 *cameraThread1;  // capture worker, owned by this window
    // NOTE(review): `Common` is not declared in common.h as shown (common.h is
    // a C header) — confirm where this class comes from.
    Common common;

public slots:
    // Receives the RGB888 frame pointer emitted by CameraThread1.
    void displayCam1Buf(unsigned char *buffer);
};
// Bug fix: the listing was truncated — the class body and include guard were
// never closed, so the header could not compile.
#endif // MAINWINDOW_H

camerathread1.cpp

#include "camerathread1.h"
#include "v4l2.h"

// Nothing to initialize here: camera1Dev and previewCam1 carry in-class
// defaults, and the QThread base constructs itself.
CameraThread1::CameraThread1() = default;

// QThread entry point: runs the capture loop on this worker thread.
void CameraThread1::run()
{
     msleep(200); // brief startup delay — presumably to let the UI finish constructing; TODO confirm
    getCam1Buf();
}

void CameraThread1::getCam1Buf()
{
    unsigned char *yuvBuffer = (unsigned char*)malloc(SRC_WIDTH * SRC_HEIGHT * 3);
    unsigned char *rgbBuffer = (unsigned char*)malloc(SRC_WIDTH * SRC_HEIGHT * 3);
    buffer * vbuffer;
    vbuffer = (buffer*)calloc (NB_BUFFER, sizeof (*vbuffer));

    int fd = c_OpenDevice(camera1Dev.toLocal8Bit().data());
    if(fd < 0) return;

    c_FormatDevice(V4L2_PIX_FMT_YUYV, fd);
    c_RequestBuffer(vbuffer, fd);

    while (previewCam1) {
        if(c_GetBuffer(yuvBuffer, vbuffer, fd) != 0) return;
        //c_NV12_TO_RGB24(yuvBuffer, rgbBuffer, SRC_WIDTH, SRC_HEIGHT);
        c_yuyv_to_rgb(yuvBuffer, rgbBuffer, SRC_WIDTH, SRC_HEIGHT);
        emit showCamera1(rgbBuffer);
        msleep(1000/30);
    }

    //close camera
    c_DeintDevice(fd, vbuffer);
    c_CloseDevice(fd);
}

v4l2.c

#include "v4l2.h"

/* Buffer type negotiated in c_OpenDevice: V4L2_BUF_TYPE_VIDEO_CAPTURE or
 * V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; -1 until a device has been opened. */
int buf_type = -1;
int current_video_state = 0; /* 1 while streaming is active */

/* Bug fix: the original unsized tentative definition `fmtd[]` degenerated to
 * a single element, while common.h declares `extern struct v4l2_fmtdesc
 * fmtd[20]` and c_OpenDevice writes multiple entries — out-of-bounds writes.
 * The definition must match the declared size. */
struct v4l2_fmtdesc fmtd[20];
struct v4l2_format format; /* format negotiated by c_FormatDevice */

unsigned char * displaybuf = NULL;
unsigned char * rgb24 = NULL;

/*
 * Open the video device node `video`, verify it supports video capture
 * (single- or multi-planar, recorded in the global buf_type), and enumerate
 * its pixel formats into the global fmtd[] array.
 * Returns the open fd on success, -1 on failure (device closed).
 */
int c_OpenDevice(char *video)
{
    struct v4l2_capability cap;
    struct v4l2_fmtdesc fmtdesc;

    /* open video */
    int videofd = open(video, O_RDWR);
    if ( -1 == videofd ) {
        printf("Error: cannot open %s device\n",video);
        return videofd;
    }
    printf("The %s device was opened successfully.\n", video);

    /* check capability */
    memset(&cap, 0, sizeof(struct v4l2_capability));
    if ( ioctl(videofd, VIDIOC_QUERYCAP, &cap) < 0 ) {
        printf("Error: get capability.\n");
        goto fatal;
    }

    /* pick the buffer type this application can drive */
    if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE){
        buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    }else if(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE){
        buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    }else{
        printf("Error: application not support this device %s.\n",video);
        goto fatal;
    }

    /* enumerate all pixel formats into fmtd[] */
    memset(fmtd, 0, sizeof(fmtd)); /* note: need to clean the array first */
    memset(&fmtdesc, 0, sizeof(fmtdesc));
    fmtdesc.index=0;
    fmtdesc.type=buf_type;
    while(ioctl(videofd, VIDIOC_ENUM_FMT, &fmtdesc) != -1) {
        /* Bug fix: bound the store — a device reporting more formats than
           fmtd[] holds previously wrote past the end of the array. */
        if (fmtdesc.index < sizeof(fmtd) / sizeof(fmtd[0]))
            fmtd[fmtdesc.index] = fmtdesc;
        fmtdesc.index++;
    }
    return videofd;

fatal:
    c_CloseDevice(videofd);
    /* Bug fix: the original fell off the end of a non-void function here,
       which is undefined behavior when the caller uses the result. */
    return -1;
}

/*
 * Negotiate the capture format: request SRC_WIDTH x SRC_HEIGHT in `pixformat`
 * on videofd, then read back what the driver actually chose into the global
 * `format` (drivers may adjust the request).
 * Returns 0 on success, errno on ioctl failure.
 */
int c_FormatDevice(unsigned int pixformat, int videofd)
{
    /* set format */
    memset(&format, 0, sizeof(struct v4l2_format));
    format.type = buf_type;

    if (format.type == V4L2_BUF_TYPE_VIDEO_CAPTURE){
        format.fmt.pix.width = SRC_WIDTH;
        format.fmt.pix.height = SRC_HEIGHT;
        format.fmt.pix.pixelformat = pixformat;
        format.fmt.pix.field = V4L2_FIELD_ANY;
        printf("VIDIO_S_FMT: type=%d, w=%d, h=%d, fmt=0x%x, field=%d\n",
               format.type, format.fmt.pix.width,
               format.fmt.pix.height, format.fmt.pix.pixelformat,
               format.fmt.pix.field);
    }else{
        /* multi-planar device: same request through the pix_mp union member */
        format.fmt.pix_mp.width = SRC_WIDTH;
        format.fmt.pix_mp.height = SRC_HEIGHT;
        format.fmt.pix_mp.pixelformat = pixformat;
        format.fmt.pix_mp.field = V4L2_FIELD_ANY;
        printf(">> VIDIO_S_FMT: type=%d, w=%d, h=%d, fmt=0x%x, field=%d\n",
               format.type, format.fmt.pix_mp.width,
               format.fmt.pix_mp.height, format.fmt.pix_mp.pixelformat,
               format.fmt.pix_mp.field);
    }

    if (ioctl(videofd, VIDIOC_S_FMT, &format) < 0) {
        printf("Error: set format %d.\n", errno);
        return errno;
    }

    /* get format (the driver may have adjusted the requested values) */
    if (ioctl(videofd, VIDIOC_G_FMT, &format) < 0) {
        printf("Error: get format %d.\n", errno);
        return errno;
    }

    /* Bug fix: the original logged the single-planar `pix` fields even for
       multi-planar devices, printing the wrong union member. */
    if (format.type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
        printf("VIDIO_G_FMT: type=%d, w=%d, h=%d, fmt=0x%x, field=%d\n",
               format.type, format.fmt.pix.width,
               format.fmt.pix.height, format.fmt.pix.pixelformat,
               format.fmt.pix.field);
    } else {
        printf("VIDIO_G_FMT: type=%d, w=%d, h=%d, fmt=0x%x, field=%d\n",
               format.type, format.fmt.pix_mp.width,
               format.fmt.pix_mp.height, format.fmt.pix_mp.pixelformat,
               format.fmt.pix_mp.field);
    }
    return 0;
}


/*
 * Request NB_BUFFER mmap'ed buffers from the driver, map them into vbuffer[],
 * queue them all, and start streaming.
 * Returns 0 on success; on failure closes videofd and returns -1.
 *
 * NOTE(review): for multi-planar devices the mappings are stored at
 * vbuffer[i * num_planes + j], but the caller (getCam1Buf) allocates only
 * NB_BUFFER entries — with num_planes > 1 this overruns the array. Confirm
 * callers before using this path with a multi-planar driver.
 */
int c_RequestBuffer(buffer *vbuffer, int videofd)
{
    struct v4l2_requestbuffers reqbuf;
    struct v4l2_buffer v4l2_buf;
    /* Bug fix: the plane descriptor array must outlive the ioctl calls.
       The original declared it as a VLA inside the `if` blocks, so
       v4l2_buf.m.planes was dangling by the time VIDIOC_QUERYBUF/QBUF ran
       (undefined behavior). */
    struct v4l2_plane buf_planes[VIDEO_MAX_PLANES];

    /* buffer preparation */
    memset(&reqbuf, 0, sizeof(struct v4l2_requestbuffers));
    reqbuf.count = NB_BUFFER;
    reqbuf.type = buf_type;
    reqbuf.memory = V4L2_MEMORY_MMAP;

    if (ioctl(videofd, VIDIOC_REQBUFS, &reqbuf) < 0) {
        printf("Error: request buffer error=%d.\n",errno);
        goto fatal;
    }

    /* map buffers */
    for (unsigned int i = 0; i < reqbuf.count; i++) {
        memset(&v4l2_buf, 0, sizeof(struct v4l2_buffer));
        v4l2_buf.index = i;
        v4l2_buf.type = buf_type;
        v4l2_buf.memory = V4L2_MEMORY_MMAP;
        if (v4l2_buf.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
            memset(buf_planes, 0, sizeof(buf_planes));
            v4l2_buf.m.planes = buf_planes;
            v4l2_buf.length = format.fmt.pix_mp.num_planes;
        }

        if (ioctl(videofd, VIDIOC_QUERYBUF, &v4l2_buf) < 0) {
            printf("Error: query buffer %d.\n", errno);
            goto fatal;
        }

        if (v4l2_buf.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
            for (int j = 0; j < format.fmt.pix_mp.num_planes; j++) {
                vbuffer[i * format.fmt.pix_mp.num_planes + j].start
                        = mmap(0, v4l2_buf.m.planes[j].length, PROT_READ,
                               MAP_SHARED, videofd, v4l2_buf.m.planes[j].m.mem_offset);
                vbuffer[i * format.fmt.pix_mp.num_planes + j].length
                        = v4l2_buf.m.planes[j].length;
            }
        }else{//V4L2_BUF_TYPE_VIDEO_CAPTURE
            vbuffer[i].start = mmap(0, v4l2_buf.length, PROT_READ, MAP_SHARED,
                                    videofd, v4l2_buf.m.offset);
            vbuffer[i].length = v4l2_buf.length;
        }

        /* NOTE(review): only the first mapping of each buffer is checked. */
        if (vbuffer[i].start == MAP_FAILED) {
            printf("Error: mmap buffers.\n");
            goto fatal;
        }
    }

    /* queue buffers */
    for (unsigned int i = 0; i < reqbuf.count; ++i) {
        memset(&v4l2_buf, 0, sizeof(struct v4l2_buffer));
        v4l2_buf.index = i;
        v4l2_buf.type = buf_type;
        v4l2_buf.memory = V4L2_MEMORY_MMAP;
        if (v4l2_buf.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
            memset(buf_planes, 0, sizeof(buf_planes));
            v4l2_buf.m.planes = buf_planes;
            v4l2_buf.length = format.fmt.pix_mp.num_planes;
        }
        if (ioctl(videofd, VIDIOC_QBUF, &v4l2_buf) < 0) {
            printf("Error: queue buffers, ret:%d i:%d\n", errno, i);
            goto fatal;
        }
    }
    printf("Queue buf done.\n");

    /* stream on */
    if (ioctl(videofd, VIDIOC_STREAMON, &buf_type) < 0) {
        printf("Error: streamon failed erron = %d.\n",errno);
        goto fatal;
    }
    /* open success */
    current_video_state = 1;
    return 0;

fatal:
    printf("init camera fail!\n");
    current_video_state = 0;
    c_CloseDevice(videofd);
    return -1;
}

/*
 * Dequeue one filled capture buffer from the driver, copy its payload into
 * yuvBuffer (which must hold at least SRC_WIDTH*SRC_HEIGHT*3 bytes), and
 * requeue the buffer. Returns 0 on success, errno on ioctl failure.
 */
int c_GetBuffer(unsigned char* yuvBuffer, buffer *vbuffer, int videofd)
{
    struct v4l2_buffer v4l2_buf;
    /* Bug fix: must stay in scope across the DQBUF/QBUF ioctls — the original
       used a block-scoped VLA, leaving v4l2_buf.m.planes dangling (UB). */
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int buf_index = -1;
    int planes_num = format.fmt.pix_mp.num_planes;

    memset(yuvBuffer, 0, SRC_WIDTH * SRC_HEIGHT * 3);

    /* dqbuf from video node */
    memset(&v4l2_buf, 0, sizeof(struct v4l2_buffer));
    v4l2_buf.type = buf_type;
    v4l2_buf.memory = V4L2_MEMORY_MMAP;
    if (v4l2_buf.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
        memset(planes, 0, sizeof(planes));
        v4l2_buf.m.planes = planes;
        v4l2_buf.length = planes_num;
    }

    if (ioctl(videofd, VIDIOC_DQBUF, &v4l2_buf) < 0) {
        printf("Error: dequeue buffer, errno %d\n", errno);
        return errno;
    }

    buf_index = v4l2_buf.index;
    if (v4l2_buf.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
        /* Bug fix: the original copied every plane from vbuffer[buf_index]
           to offset 0 of yuvBuffer, overwriting earlier planes. Mappings are
           stored at vbuffer[buf_index * planes_num + i] (see c_RequestBuffer)
           and the planes are concatenated into yuvBuffer. */
        size_t offset = 0;
        for (int i = 0; i < planes_num; i++) {
            size_t plane_size = format.fmt.pix_mp.plane_fmt[i].sizeimage;
            memcpy(yuvBuffer + offset,
                   vbuffer[buf_index * planes_num + i].start, plane_size);
            offset += plane_size;
        }
    }else{// V4L2_BUF_TYPE_VIDEO_CAPTURE
        memcpy(yuvBuffer, vbuffer[buf_index].start, format.fmt.pix.sizeimage);
    }

    /* hand the buffer back to the driver */
    if (ioctl(videofd, VIDIOC_QBUF, &v4l2_buf) < 0) {
        printf("Error: queue buffer.\n");
        return errno;
    }
    return 0;
}


/* Close the video device file descriptor returned by c_OpenDevice. */
void c_CloseDevice(int videofd)
{
    close(videofd);
}

/* Tear down capture: stop streaming, unmap the mmap'ed buffers, release the
 * driver-side buffers, and free the caller's vbuffer array (ownership of
 * vbuffer transfers to this function). */
void c_DeintDevice(int videofd, buffer *vbuffer)
{
    struct v4l2_requestbuffers v4l2_rb;

//    if(current_video_state != 1)
//        return;

    if(ioctl(videofd, VIDIOC_STREAMOFF, &buf_type) < 0 ){
        printf("Error: stream close failed erron= %d\n", errno);
        return; /* NOTE(review): bails without munmap/free — vbuffer leaks on this path */
    }

    for (int i = 0; i < NB_BUFFER; i++){
        /* NOTE(review): when adjacent lengths differ this unmaps with the
           NEXT buffer's length — looks like a workaround for a driver quirk
           ("first buffer.length maybe not current"); confirm before changing. */
        if((i < NB_BUFFER -1) && (vbuffer[i].length !=  vbuffer[i+1].length))
            munmap (vbuffer[i].start, vbuffer[i+1].length);//first buffer.length maybe not current
        else
            munmap (vbuffer[i].start, vbuffer[i].length);
    }

    /* REQBUFS with count=0 releases the driver-side buffers */
    memset(&v4l2_rb, 0, sizeof(struct v4l2_requestbuffers));
    v4l2_rb.count = 0;
    v4l2_rb.type = buf_type;
    v4l2_rb.memory = V4L2_MEMORY_MMAP;
    if (ioctl(videofd, VIDIOC_REQBUFS, &v4l2_rb) < 0)
        printf("Error: release buffer error=%d.\n",errno);

    free(vbuffer);
    vbuffer=NULL; /* no-op: only clears the local copy of the pointer */
}

/*
 * Convert an NV12 frame (planar Y followed by interleaved UV, 4:2:0 chroma)
 * to packed RGB888. `data` holds width*height Y bytes then width*height/2 UV
 * bytes; `rgb` receives width*height*3 bytes. Each 2x2 pixel quad shares one
 * U/V pair. BT.601-style coefficients, results clamped to [0, 255].
 */
void c_NV12_TO_RGB24(unsigned char *data, unsigned char *rgb, int width, int height)
{
    const unsigned char *luma = data;                    /* Y plane */
    const unsigned char *chroma = data + width * height; /* interleaved UV plane */
    unsigned char *out = rgb;

    for (int row = 0; row < height; row++) {
        const unsigned char *yrow = luma + row * width;
        const unsigned char *uvrow = chroma + (row / 2) * width;

        for (int col = 0; col < width; col++) {
            int y = yrow[col];
            int u = uvrow[(col / 2) * 2];
            int v = uvrow[(col / 2) * 2 + 1];

            int r = y + 1.4075 * (v - 128);
            int g = y - 0.3455 * (u - 128) - 0.7169 * (v - 128);
            int b = y + 1.779 * (u - 128);

            if (r > 255) r = 255; else if (r < 0) r = 0;
            if (g > 255) g = 255; else if (g < 0) g = 0;
            if (b > 255) b = 255; else if (b < 0) b = 0;

            *out++ = (unsigned char)r;
            *out++ = (unsigned char)g;
            *out++ = (unsigned char)b;
        }
    }
}


/*
 * Convert a packed YUYV (YUY2, 4:2:2) frame to packed RGB888.
 * `yuyvdata` holds w*h*2 bytes in [Y0 U0 Y1 V1] macropixels; each macropixel
 * yields two RGB pixels that share the U0/V1 chroma pair. `rgbdata` receives
 * w*h*3 bytes. w*h must be even (pixels are processed in pairs).
 * BT.601-style coefficients, clamped to [0, 255].
 */
void c_yuyv_to_rgb(unsigned char *yuyvdata, unsigned char *rgbdata, int w, int h)
{
    int r1, g1, b1;
    int r2, g2, b2;
    for (int i = 0; i < w * h / 2; i++)
    {
        /* Index the macropixel directly. The original staged it through a
           signed `char data[4]` copy — an extra memcpy per pair, and signed
           char is the wrong type for raw bytes. */
        const unsigned char *mp = yuyvdata + i * 4;
        unsigned char Y0 = mp[0];
        unsigned char U0 = mp[1];
        unsigned char Y1 = mp[2];
        unsigned char V1 = mp[3];

        r1 = Y0 + 1.4075 * (V1 - 128); if (r1 > 255) r1 = 255; if (r1 < 0) r1 = 0;
        g1 = Y0 - 0.3455 * (U0 - 128) - 0.7169 * (V1 - 128); if (g1 > 255) g1 = 255; if (g1 < 0) g1 = 0;
        b1 = Y0 + 1.779 * (U0 - 128); if (b1 > 255) b1 = 255; if (b1 < 0) b1 = 0;

        r2 = Y1 + 1.4075 * (V1 - 128); if (r2 > 255) r2 = 255; if (r2 < 0) r2 = 0;
        g2 = Y1 - 0.3455 * (U0 - 128) - 0.7169 * (V1 - 128); if (g2 > 255) g2 = 255; if (g2 < 0) g2 = 0;
        b2 = Y1 + 1.779 * (U0 - 128); if (b2 > 255) b2 = 255; if (b2 < 0) b2 = 0;

        rgbdata[i * 6 + 0] = r1;
        rgbdata[i * 6 + 1] = g1;
        rgbdata[i * 6 + 2] = b1;
        rgbdata[i * 6 + 3] = r2;
        rgbdata[i * 6 + 4] = g2;
        rgbdata[i * 6 + 5] = b2;
    }
}

mainwindow.cpp

#include "mainwindow.h"
#include "ui_mainwindow.h"

#include <QDebug>
#include <QProcess>
#include <QStorageInfo>
#include <QDirIterator>
#include <libudev.h>
#include <QSocketNotifier>


// Build the UI, then spin up the camera capture thread and wire its frame
// signal to the display slot.
MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::MainWindow)
{
    ui->setupUi(this);

    // Worker thread that grabs V4L2 frames and emits them as RGB888 buffers.
    cameraThread1 = new CameraThread1();
    //cameraThread1->camera1Dev = "/dev/video-camera0";
    connect(cameraThread1, &CameraThread1::showCamera1,
            this, &MainWindow::displayCam1Buf);
    cameraThread1->start();
}

void MainWindow::displayCam1Buf(unsigned char *buffer)
{
    QImage img;
    QPixmap pixmap;
    img = QImage(buffer, 1280, 720, QImage::Format_RGB888);
    QPixmap scaledPixmap = pixmap.fromImage(img);

    ui->camera1->setPixmap(scaledPixmap);
   // qDebug("<<<<<< %s %d\n",__FILE__,__LINE__);
}

// Destructor. Bug fix: the capture thread was never stopped or deleted —
// destroying the window while the QThread runs leaves it emitting into a
// dead object and leaks the thread ("QThread: Destroyed while thread is
// still running").
MainWindow::~MainWindow()
{
    if (cameraThread1) {
        cameraThread1->previewCam1 = false; // ends the getCam1Buf() loop
        cameraThread1->wait();              // join before destruction
        delete cameraThread1;
    }
    delete ui;
}


本文来自互联网用户投稿,该文观点仅代表作者本人,不代表本站立场。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如若转载,请注明出处:http://www.coloradmin.cn/o/1850000.html

如若内容造成侵权/违法违规/事实不符,请联系多彩编程网进行投诉反馈,一经查实,立即删除!

相关文章

Python实例:openpyxl读写单元格

原文链接&#xff1a;http://www.juzicode.com/python-example-openpyxl-access-data 本文介绍openpyxl模块几种读写单元格的方法&#xff0c;先手动创建一个表格&#xff0c;在代码里先用load_workbook()方法读取这个表格生成一个Workbook对象wb&#xff0c;再通过wb得到一个…

indexedDB---掌握浏览器内建数据库的基本用法

1.认识indexedDB IndexedDB 是一个浏览器内建的数据库&#xff0c;它可以存放对象格式的数据&#xff0c;类似本地存储localstore&#xff0c;但是相比localStore 10MB的存储量&#xff0c;indexedDB可存储的数据量远超过这个数值&#xff0c;具体是多少呢&#xff1f; 默认情…

将WIN10的wifi上网分享给以太网接口

目录 打开网络设置设置属性点这里的设置将wlan主机的以太网接口IP设为自动获取 如果连接不成功&#xff0c;拔网线重连一次 打开网络设置 设置属性 点这里的设置 将wlan主机的以太网接口IP设为自动获取 如果连接不成功&#xff0c;拔网线重连一次

什么是N卡和A卡?有什么区别?

名人说&#xff1a;莫听穿林打叶声&#xff0c;何妨吟啸且徐行。—— 苏轼《定风波莫听穿林打叶声》 本篇笔记整理&#xff1a;Code_流苏(CSDN)&#xff08;一个喜欢古诗词和编程的Coder&#x1f60a;&#xff09; 目录 一、什么是N卡和A卡&#xff1f;有什么区别&#xff1f;…

Desoutter智能拧紧中枢Connect过压维修

马头智能拧紧中枢过压维修是马头拧紧设备维护中的重要环节。当出现马头拧紧设备中枢过压现象时&#xff0c;会导致设备性能下降&#xff0c;甚至损坏设备&#xff0c;因此及时对过压中枢进行维修是保障设备正常运转的关键。 Desoutter电动螺丝刀控制器过压的原因可能有很多&am…

数学建模系列(3/4):典型建模方法

目录 引言 1. 回归分析 1.1 线性回归 基本概念 Matlab实现 1.2 多元回归 基本概念 Matlab实现 1.3 非线性回归 基本概念 Matlab实现 2. 时间序列分析 2.1 时间序列的基本概念 2.2 移动平均 基本概念 Matlab实现 2.3 指数平滑 基本概念 Matlab实现 2.4 ARIM…

HarmonyOS Next 系列之沉浸式状态实现的多种方式(七)

系列文章目录 HarmonyOS Next 系列之省市区弹窗选择器实现&#xff08;一&#xff09; HarmonyOS Next 系列之验证码输入组件实现&#xff08;二&#xff09; HarmonyOS Next 系列之底部标签栏TabBar实现&#xff08;三&#xff09; HarmonyOS Next 系列之HTTP请求封装和Token…

《计算机英语》测试练习题

作业3 一、单选题 内存条 的英文翻译是 A. memory chip (内存条通常指的是内存条上的存储芯片&#xff0c;但整个内存条的英文翻译应为 "RAM"&#xff0c;即 Random Access Memory 随机存取存储器) capacitor的中文意思是 D. 电容器 relay 的解释是 A. 继电器 por…

一、docker简介及卸载、安装

目录 一、Docker 简介 二、dockers三要素 1、Docker镜像&#xff08;image&#xff09; 2、Docker仓库 3、Docker容器 三、docker架构图 四. Docker 运行的基本流程 五、docker 卸载 1、停止docker服务 2、查看yum安装的docker文件包 3、查看docker相关的rpm源文件 …

ardupilot开发 --- Jetson Orin Nano 后篇

我拼命加速&#xff0c;但贫穷始终快我一步 0~1920. visp-d455&#xff1a;基于IBVS的Pixhawk无人机视觉伺服20.1 基础关于连接、通讯、UDP forward服务&#xff1a;一些相关的、有用的例程Linux C程序的gdb断点调试搭建仿真解决【testPixhawkDroneTakeoff.cpp例程能解锁但起飞…

优选免单模式:电商销售的新篇章

随着电商市场的日益繁荣&#xff0c;各种创新销售模式层出不穷。其中&#xff0c;优选免单模式以其独特的运作方式和激励机制&#xff0c;吸引了大量消费者的目光。该模式的核心在于通过降低商品售价、引入社交元素以及设计阶梯式奖励&#xff0c;激发消费者的购买热情&#xf…

[已解决]ImportError: DLL load failed while importing win32api: 找不到指定的程序。

使用pip install pywin32302安装后import找不到win32api 失败尝试 上网找别人的解决方案&#xff0c;大部分解决方案都是通过复制下面两个dll文件到 下面这个文件夹&#xff0c;并且复制到C:\Windows\System32&#xff0c;从而解决问题&#xff0c;但是我没能成功。 解决方…

Python编辑器pycharm详细安装步骤

PyCharm 的详细安装步骤 以下是在 Windows 系统上安装 PyCharm 的详细步骤&#xff1a; 第一步&#xff1a;下载安装程序 访问 PyCharm 官方网站&#xff08;https://www.jetbrains.com/pycharm/&#xff09;&#xff0c;根据自己的需求选择社区版&#xff08;Community&…

分享uniapp + Springboot3+vue3小程序项目实战

分享uniapp Springboot3vue3小程序项目实战 经过10天敲代码&#xff0c;终于从零到项目测试完成&#xff0c;一个前后端分离的小程序实战项目学习完毕 时间从6月12日 到6月22日&#xff0c;具有程序开发基础&#xff0c;第一次写uniapp,Springboot以前用过&#xff0c;VUE3也…

docker in docker 在CI中应用解析

docker in docker 简介 docker里嵌套运行docker&#xff0c;本文讲解其在jenkins和gitlab-runner 种的调用流程 一、用于jenkins 容器化部署jenkins时调用docker命令集成CI功能 [rootops-demo~]# docker inspect jenkins --format"{{json .Mounts}}" [{"T…

电脑文件夹怎么加密?文件夹加密的5种方法

在数字化时代&#xff0c;信息安全显得尤为重要。对于个人电脑用户来说&#xff0c;文件夹加密是一种有效保护隐私和数据安全的方法。本文将介绍五种文件夹加密的方法&#xff0c;帮助您更好地保护自己的重要文件。 如何设置文件夹密码方法一&#xff1a;利用Windows系统自带的…

docker 基本用法及跨平台使用

一、Docker的优点 docker 主要解决的问题就是程序开发过程中编译和部署中遇到的环境配置的问题。 1.1 Docker与其他虚拟机层次结构的区别** 运行程序重点关注点在于环境。 VM虚拟机是基于Hypervisor虚拟化服务运行的。 Docker是基于内核的虚拟化技术实现的。 1.2 Docker的技…

深入分析并可视化城市轨道数据

介绍 中国城市化进程加速中&#xff0c;城市轨道交通的迅速扩张成为提升城市运行效率和居民生活品质的关键。这一网络从少数大城市延伸至众多大中型城市&#xff0c;映射了经济飞跃和城市管理现代化。深入分析并可视化城市轨道数据&#xff0c;对于揭示网络特性、评估效率、理…

计算机组成原理 | 数据的表示、运算和校验(3)数据处理与存储

移位 舍入和扩展 存储模式和对齐 不按边界对齐&#xff0c;访存次数会增加一次

大型语言模型在AMD GPU上的推理优化

Large language model inference optimizations on AMD GPUs — ROCm Blogs 大型语言模型&#xff08;LLMs&#xff09;已经改变了自然语言处理和理解&#xff0c;促进了在多个领域中的众多人工智能应用。LLMs在包括AI助手、聊天机器人、编程、游戏、学习、搜索和推荐系统在内的…