paddleSeg项目实战

news2024/9/24 14:33:28
  • 问题1:cmake 编译报错
    解决办法:添加一条语句
set(DEMO_NAME "test_seg")
#这里的test_seg就是src里的文件名
  • 问题2:
严重性	代码	说明	项目	文件	行	禁止显示状态
错误	LNK2038	检测到“RuntimeLibrary”的不匹配项: 值“MT_StaticRelease”不匹配值“MD_DynamicRelease”(test_seg.obj 中)	test_seg	D:\workshop\PaddleSeg-release-2.8\deploy\cpp\build\libcpmt.lib(locale0.obj)	1	

解决办法:
在这里插入图片描述

  • 问题3:
       严重性	代码	说明	项目	文件	行	禁止显示状态
错误	LNK1181	无法打开输入文件“D:\paddle_inference102\paddle\lib\.obj”	test_seg	D:\workshop\PaddleSeg-release-2.8\deploy\cpp\build\LINK	1	

解决方法:

在这里插入图片描述

结果:

在这里插入图片描述

  • 完整cmake文件
# Build configuration for the PaddleSeg C++ inference demo.
# NOTE(review): cmake_minimum_required 3.0 is very old; kept as-is to avoid
# rejecting users' existing CMake installations.
cmake_minimum_required(VERSION 3.0)
project(cpp_inference_demo CXX C)

# WITH_MKL:        link Paddle's bundled MKL (ON) or OpenBLAS (OFF).
# WITH_GPU:        additionally link the CUDA runtime / cuBLAS / cuDNN.
# WITH_STATIC_LIB: link libpaddle_inference statically (.a/.lib) or shared.
# USE_TENSORRT:    additionally link TensorRT (combined with WITH_GPU on Linux).
# WITH_ROCM:       link the ROCm HIP runtime (Linux only, see below).
option(WITH_MKL        "Compile demo with MKL/OpenBlas support, default use MKL."       ON)
option(WITH_GPU        "Compile demo with GPU/CPU, default use CPU."                    OFF)
option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static."   ON)
option(USE_TENSORRT "Compile demo with TensorRT."   OFF)
option(WITH_ROCM "Compile demo with rocm." OFF)

if (WIN32)
  # Root of the extracted Paddle Inference package (contains paddle/ and
  # third_party/); supplied with -DPADDLE_LIB=...
  SET(PADDLE_LIB "" CACHE PATH "Location of libraries")
  # Base name of the Paddle inference import library (paddle_inference.lib /
  # paddle_inference.dll).  Defaulted to "paddle_inference": with the former
  # empty default, forgetting -DPADDLE_LIB_NAME made the link step look for
  # "<lib dir>/.obj" and fail with LNK1181 (problem 3 in the post above).
  # Passing -DPADDLE_LIB_NAME=... still overrides this cache default.
  SET(PADDLE_LIB_NAME "paddle_inference" CACHE STRING "libpaddle_inference")

  # yaml-cpp is built from source via ExternalProject on Windows; expose its
  # headers and the directory its static library is installed into.
  include(cmake/yaml-cpp.cmake)
  include_directories("${CMAKE_SOURCE_DIR}/")
  include_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/src/ext-yaml-cpp/include")
  link_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/lib")
endif()

# Select the Paddle linkage-mode preprocessor definitions.
if(WITH_STATIC_LIB)
  # PD_INFER_DECL carries dllimport/dllexport attributes in shared-library
  # mode; define it as empty for static builds to avoid compilation issues.
  add_definitions("/DPD_INFER_DECL=")
else()
  add_definitions("-DPADDLE_WITH_SHARED_LIB")
endif()

# Rewrite every MSVC C++ flag set from the DLL runtime (/MD) to the static
# runtime (/MT).  This resolves the LNK2038 "RuntimeLibrary mismatch:
# MT_StaticRelease vs MD_DynamicRelease" error (problem 2 in the post above),
# since the prebuilt Paddle Windows libraries use the static CRT.
# Kept as a macro (not a function) on purpose: it must mutate the
# CMAKE_CXX_FLAGS* variables in the caller's scope.
macro(safe_set_static_flag)
    foreach(flag_var
        CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
        CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
      if(${flag_var} MATCHES "/MD")
        string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
      endif()
    endforeach()
endmacro()
# Name of the demo: src/${DEMO_NAME}.cc must exist (problem 1 in the post
# above).  Declared as a cache variable so -DDEMO_NAME=... on the command
# line still overrides it; a plain set() would shadow any user-provided value
# and turn the guard below into dead code.
set(DEMO_NAME "test_seg" CACHE STRING "Name of the demo source file under src/")
if(NOT DEFINED PADDLE_LIB)
  message(FATAL_ERROR "please set PADDLE_LIB with -DPADDLE_LIB=/path/paddle/lib")
endif()
if(NOT DEFINED DEMO_NAME)
  message(FATAL_ERROR "please set DEMO_NAME with -DDEMO_NAME=demo_name")
endif()

# Expose Paddle's own headers plus every bundled third-party dependency.
# Each dependency under third_party/install/ follows the same
# <name>/include + <name>/lib layout, so iterate instead of repeating.
include_directories("${PADDLE_LIB}/")
set(PADDLE_LIB_THIRD_PARTY_PATH "${PADDLE_LIB}/third_party/install/")
foreach(third_party_dep protobuf glog gflags xxhash cryptopp onnxruntime paddle2onnx)
  include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}${third_party_dep}/include")
  link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}${third_party_dep}/lib")
endforeach()
link_directories("${PADDLE_LIB}/paddle/lib")

# Compiler flags.  On Windows, MSVC_STATIC_CRT switches the whole build to
# the static C runtime (/MT, /MTd); the prebuilt Paddle Windows libraries are
# built as MT_StaticRelease, so leaving the default /MD objects in place
# triggers LNK2038 "RuntimeLibrary mismatch" (problem 2 in the post above).
if (WIN32)
  # glog is linked statically here; neutralize its dllimport/dllexport macro.
  add_definitions("/DGOOGLE_GLOG_DLL_DECL=")
  option(MSVC_STATIC_CRT "use static C Runtime library by default" ON)
  if (MSVC_STATIC_CRT)
    if (WITH_MKL)
      # MKL builds of Paddle use OpenMP, so the demo must enable it too.
      set(FLAG_OPENMP "/openmp")
    endif()
    set(CMAKE_C_FLAGS_DEBUG   "${CMAKE_C_FLAGS_DEBUG} /bigobj /MTd ${FLAG_OPENMP}")
    set(CMAKE_C_FLAGS_RELEASE  "${CMAKE_C_FLAGS_RELEASE} /bigobj /MT ${FLAG_OPENMP}")
    set(CMAKE_CXX_FLAGS_DEBUG  "${CMAKE_CXX_FLAGS_DEBUG} /bigobj /MTd ${FLAG_OPENMP}")
    set(CMAKE_CXX_FLAGS_RELEASE   "${CMAKE_CXX_FLAGS_RELEASE} /bigobj /MT ${FLAG_OPENMP}")
    # Also rewrite any pre-existing /MD occurrences to /MT (see the macro
    # safe_set_static_flag defined above).
    safe_set_static_flag()
    if (WITH_STATIC_LIB)
      add_definitions(-DSTATIC_LIB)
    endif()
  endif()
else()
  if(WITH_MKL)
    set(FLAG_OPENMP "-fopenmp")
  endif()
  # NOTE(review): appending to CMAKE_CXX_FLAGS is legacy style; kept as-is so
  # the flag ordering of existing builds does not change.
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 ${FLAG_OPENMP}")
endif()

# Default CUDA library location on Linux.  On Windows, CUDA_LIB is declared
# as a user-supplied cache path in the GPU dependency section further below.
if(WITH_GPU AND NOT WIN32)
  set(CUDA_LIB "/usr/local/cuda/lib64/" CACHE STRING "CUDA Library")
endif()

# Linux-only TensorRT discovery: the user points TENSORRT_ROOT at an
# extracted TensorRT SDK, and the major version is parsed out of the headers
# so it can be reported at configure time.
if(NOT WIN32)
    if (USE_TENSORRT AND WITH_GPU)
      set(TENSORRT_ROOT "" CACHE STRING "The root directory of TensorRT library")
      if("${TENSORRT_ROOT}" STREQUAL "")
          message(FATAL_ERROR "The TENSORRT_ROOT is empty, you must assign it a value with CMake command. Such as: -DTENSORRT_ROOT=TENSORRT_ROOT_PATH ")
      endif()
      set(TENSORRT_INCLUDE_DIR ${TENSORRT_ROOT}/include)
      set(TENSORRT_LIB_DIR ${TENSORRT_ROOT}/lib)
      # Older TensorRT releases define NV_TENSORRT_MAJOR in NvInfer.h; newer
      # ones moved it to NvInferVersion.h — hence the two-step lookup.
      file(READ ${TENSORRT_INCLUDE_DIR}/NvInfer.h TENSORRT_VERSION_FILE_CONTENTS)
      string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
        "${TENSORRT_VERSION_FILE_CONTENTS}")
      if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
        file(READ ${TENSORRT_INCLUDE_DIR}/NvInferVersion.h TENSORRT_VERSION_FILE_CONTENTS)
        string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
          "${TENSORRT_VERSION_FILE_CONTENTS}")
      endif()
      if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
        message(SEND_ERROR "Failed to detect TensorRT version.")
      endif()
      # Reduce the matched "#define NV_TENSORRT_MAJOR n" line to just "n".
      string(REGEX REPLACE "define NV_TENSORRT_MAJOR +([0-9]+)" "\\1"
        TENSORRT_MAJOR_VERSION "${TENSORRT_MAJOR_VERSION}")
      message(STATUS "Current TensorRT header is ${TENSORRT_INCLUDE_DIR}/NvInfer.h. "
        "Current TensorRT version is v${TENSORRT_MAJOR_VERSION}. ")
      include_directories("${TENSORRT_INCLUDE_DIR}")
      link_directories("${TENSORRT_LIB_DIR}")
    endif()
endif()

# Math backend: either Paddle's bundled MKL (plus MKL-DNN when shipped) or
# OpenBLAS.  MATH_LIB / MKLDNN_LIB feed into DEPS below.
if(WITH_MKL)
  set(MATH_LIB_PATH "${PADDLE_LIB_THIRD_PARTY_PATH}mklml")
  include_directories("${MATH_LIB_PATH}/include")
  if(WIN32)
    set(MATH_LIB ${MATH_LIB_PATH}/lib/mklml${CMAKE_STATIC_LIBRARY_SUFFIX}
                 ${MATH_LIB_PATH}/lib/libiomp5md${CMAKE_STATIC_LIBRARY_SUFFIX})
  else()
    set(MATH_LIB ${MATH_LIB_PATH}/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX}
                 ${MATH_LIB_PATH}/lib/libiomp5${CMAKE_SHARED_LIBRARY_SUFFIX})
  endif()
  set(MKLDNN_PATH "${PADDLE_LIB_THIRD_PARTY_PATH}mkldnn")
  # MKL-DNN is optional: only link it when the Paddle package ships it.
  if(EXISTS ${MKLDNN_PATH})
    include_directories("${MKLDNN_PATH}/include")
    if(WIN32)
      set(MKLDNN_LIB ${MKLDNN_PATH}/lib/mkldnn.lib)
    else()
      # Versioned soname as shipped in the prebuilt Linux package.
      set(MKLDNN_LIB ${MKLDNN_PATH}/lib/libmkldnn.so.0)
    endif()
  endif()
else()
  set(OPENBLAS_LIB_PATH "${PADDLE_LIB_THIRD_PARTY_PATH}openblas")
  include_directories("${OPENBLAS_LIB_PATH}/include/openblas")
  if(WIN32)
    set(MATH_LIB ${OPENBLAS_LIB_PATH}/lib/openblas${CMAKE_STATIC_LIBRARY_SUFFIX})
  else()
    set(MATH_LIB ${OPENBLAS_LIB_PATH}/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX})
  endif()
endif()

# Seed DEPS with the Paddle inference library itself.  On Windows the same
# ${PADDLE_LIB_NAME}.lib is linked in both modes (the original file had two
# identical WIN32 branches); on other platforms WITH_STATIC_LIB selects the
# archive or the shared object.
if(WIN32)
  set(DEPS ${PADDLE_LIB}/paddle/lib/${PADDLE_LIB_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX})
elseif(WITH_STATIC_LIB)
  set(DEPS ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
else()
  set(DEPS ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX})
endif()

# Platform link libraries and third-party library names.
if (WIN32)
  set(DEPS ${DEPS}
      ${MATH_LIB} ${MKLDNN_LIB}
      glog gflags_static libprotobuf xxhash cryptopp-static libyaml-cppmt ${EXTERNAL_LIB})
  # libcmt: static MSVC C runtime; shlwapi: Win32 shell path helpers.
  set(DEPS ${DEPS} libcmt shlwapi)
else()
  set(EXTERNAL_LIB "-lrt -ldl -lpthread")
  set(DEPS ${DEPS}
      ${MATH_LIB} ${MKLDNN_LIB}
      glog gflags protobuf xxhash cryptopp
      ${EXTERNAL_LIB})
endif()

# GPU link dependencies (CUDA runtime, and TensorRT when enabled).
if(WITH_GPU)
  if(NOT WIN32)
    if (USE_TENSORRT)
      set(DEPS ${DEPS} ${TENSORRT_LIB_DIR}/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
      set(DEPS ${DEPS} ${TENSORRT_LIB_DIR}/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
    endif()
    set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX})
  else()
    # On Windows the user must point CUDA_LIB at the directory containing the
    # CUDA (and, when USE_TENSORRT, the TensorRT) import .lib files.
    SET(CUDA_LIB "" CACHE PATH "Location of libraries")
    if (USE_TENSORRT)
      set(DEPS ${DEPS} ${CUDA_LIB}/nvinfer${CMAKE_STATIC_LIBRARY_SUFFIX})
      set(DEPS ${DEPS} ${CUDA_LIB}/nvinfer_plugin${CMAKE_STATIC_LIBRARY_SUFFIX})
    endif()
    set(DEPS ${DEPS} ${CUDA_LIB}/cudart${CMAKE_STATIC_LIBRARY_SUFFIX} )
    set(DEPS ${DEPS} ${CUDA_LIB}/cublas${CMAKE_STATIC_LIBRARY_SUFFIX} )
    set(DEPS ${DEPS} ${CUDA_LIB}/cudnn${CMAKE_STATIC_LIBRARY_SUFFIX} )
  endif()
endif()

# ROCm (HIP) runtime dependency, Linux only.
if(WITH_ROCM)
  if(NOT WIN32)
    # NOTE(review): ROCM_LIB is never set anywhere in this file, so this
    # expands to "/libamdhip64.so" unless the user passes -DROCM_LIB=... —
    # verify before enabling WITH_ROCM.
    set(DEPS ${DEPS} ${ROCM_LIB}/libamdhip64${CMAKE_SHARED_LIBRARY_SUFFIX})
  endif()
endif()

# Build the demo executable.  Linux resolves yaml-cpp/OpenCV from the system;
# Windows uses the ExternalProject yaml-cpp set up above plus a user-supplied
# OpenCV build directory (-DOPENCV_DIR=...).
if(NOT WIN32)
  include_directories(/usr/local/include)
  link_directories(/usr/local/lib)

  find_package(yaml-cpp REQUIRED)
  include_directories(${YAML_CPP_INCLUDE_DIRS})
  link_directories(${YAML_CPP_LIBRARIES})
  set(DEPS ${DEPS} "-lyaml-cpp")

  find_package(OpenCV REQUIRED)
  include_directories(${OpenCV_INCLUDE_DIRS})
  set(DEPS ${DEPS} ${OpenCV_LIBS})

  # The demo source must live at src/${DEMO_NAME}.cc (problem 1 above).
  add_executable(${DEMO_NAME} src/${DEMO_NAME}.cc)
  target_link_libraries(${DEMO_NAME} ${DEPS})
else()
  include_directories("${PADDLE_LIB}/paddle/fluid/inference")
  include_directories("${PADDLE_LIB}/paddle/include")
  link_directories("${PADDLE_LIB}/paddle/fluid/inference")

  SET(OPENCV_DIR "" CACHE PATH "Location of libraries")
  find_package(OpenCV REQUIRED PATHS ${OPENCV_DIR}/build/ NO_DEFAULT_PATH)
  include_directories(${OpenCV_INCLUDE_DIRS})
  set(DEPS ${DEPS} ${OpenCV_LIBS})

  add_executable(${DEMO_NAME} src/${DEMO_NAME}.cc)
  # Build-order dependency only (does not link): ensures the yaml-cpp
  # ExternalProject is compiled before this target links libyaml-cppmt.
  ADD_DEPENDENCIES(${DEMO_NAME} ext-yaml-cpp)
  message("DEPS:" ${DEPS})
  target_link_libraries(${DEMO_NAME} ${DEPS})
endif()

# Windows post-build steps: copy the runtime DLLs next to the executable
# (both the top-level build dir and the release/ output dir) so the demo can
# run without editing PATH.
if(WIN32 AND USE_TENSORRT)
  SET(TENSORRT_DLL "" CACHE PATH "Location of TensorRT .dll")
  add_custom_command(TARGET ${DEMO_NAME} POST_BUILD
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${TENSORRT_DLL}/nvinfer.dll ./nvinfer.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${TENSORRT_DLL}/nvinfer.dll ./release/nvinfer.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${TENSORRT_DLL}/nvinfer_plugin.dll ./nvinfer_plugin.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${TENSORRT_DLL}/nvinfer_plugin.dll ./release/nvinfer_plugin.dll
  )
endif()

# MKL runtime DLLs (mklml, Intel OpenMP, MKL-DNN).
if(WIN32 AND WITH_MKL)
  add_custom_command(TARGET ${DEMO_NAME} POST_BUILD
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mklml/lib/mklml.dll ./mklml.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mklml/lib/libiomp5md.dll ./libiomp5md.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mkldnn/lib/mkldnn.dll ./mkldnn.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mklml/lib/mklml.dll ./release/mklml.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mklml/lib/libiomp5md.dll ./release/libiomp5md.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mkldnn/lib/mkldnn.dll ./release/mkldnn.dll
  )
endif()

# OpenBLAS runtime DLL (the non-MKL build).
if(WIN32 AND NOT WITH_MKL)
  add_custom_command(TARGET ${DEMO_NAME} POST_BUILD
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/openblas/lib/openblas.dll ./openblas.dll
    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/openblas/lib/openblas.dll ./release/openblas.dll
  )
endif()

# DLLs shipped with every Paddle Windows package, plus the Paddle inference
# DLL itself (copied only into release/).
if (WIN32)
    add_custom_command(TARGET ${DEMO_NAME} POST_BUILD
        COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/onnxruntime/lib/onnxruntime.dll ./onnxruntime.dll
        COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/paddle2onnx/lib/paddle2onnx.dll ./paddle2onnx.dll
        COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/onnxruntime/lib/onnxruntime.dll ./release/onnxruntime.dll
        COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/paddle2onnx/lib/paddle2onnx.dll ./release/paddle2onnx.dll
        COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/paddle/lib/${PADDLE_LIB_NAME}.dll ./release/${PADDLE_LIB_NAME}.dll
    )
endif()

本文来自互联网用户投稿,该文观点仅代表作者本人,不代表本站立场。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如若转载,请注明出处:http://www.coloradmin.cn/o/1966297.html

如若内容造成侵权/违法违规/事实不符,请联系多彩编程网进行投诉反馈,一经查实,立即删除!

相关文章

汽车EDI中的常见术语以及流程详解

汽车EDI常见术语 EDI —— 电子数据交换3PL(第三方物流) —— 外包仓库/运输供应商。 一般用于售后市场,但偶尔也用于原始设备制造商。也可由原始设备制造商和售后市场公司用来分销产品。Aftermarket(后市场) —— 经…

详细阐述大模型微调过程、方法、案例

大模型微调 大模型微调(Fine-tuning)的定义是:在已经预训练好的大型深度学习模型基础上,使用新的、特定任务相关的数据集对模型进行进一步训练的过程。这种微调技术的主要目的是使模型能够适应新的、具体的任务或领域，…

透明屏幕方案介绍

透明屏幕方案主要涉及透明显示屏的技术原理、应用场景、优势以及未来发展趋势等方面。以下是对透明屏幕方案的详细介绍: 一、技术原理 透明屏幕,特别是透明LED显示屏和透明OLED显示屏,采用了先进的技术原理来实现其独特的显示效果。 透明LED显…

牛客 KY11.二叉树遍历

牛客 KY11.二叉树遍历 思路: 我们接收字符串以后,创建一个二叉树结构体,然后就可以开始建立树,如果是字符就malloc新的结点去存储,是 # 就返回空,最后用递归以根左右的顺序创建结点。树建立完成后…

Linux中防火墙实战之Web服务器和ssh远程服务配置指南

🏡作者主页:点击! 🐧Linux基础知识(初学):点击! 🐧Linux高级管理防护和群集专栏:点击! 🔐Linux中firewalld防火墙:点击! ⏰️创作…

螺钉柱的设计

如果螺钉柱参数设置不合理,可能导致螺钉柱滑牙、爆裂、断裂、螺丝断裂、螺钉头磨损、螺钉攻入费力等问题 具体参数可以参照下表 螺丝柱设计尺寸: 螺丝柱设计要点: 频繁拆卸的注意事项: 自攻牙螺丝柱不宜频繁拆卸,因…

AI大模型应用(2)ChatGLM3本地部署及其在alpaca_zh数据集上的低精度微调

AI大模型应用(2)ChatGLM3部署及其在alpaca_zh数据集上的低精度微调 我们之前已经了解了HuggingFace中peft库的几种高效微调方法。 参数高效微调PEFT(一)快速入门BitFit、Prompt Tuning、Prefix Tuning 参数高效微调PEFT(二)快速入门P-Tuning、P-Tuning V2 参数高效微调PEFT…

C++第三十弹---C++继承机制深度剖析(中)

✨个人主页: 熬夜学编程的小林 💗系列专栏: 【C语言详解】 【数据结构详解】【C详解】 目录 1、派生类的默认成员函数 1.1、派生类的构造函数 1.2、派生类的拷贝构造函数 1.3、派生类的赋值重载 1.4、派生类的析构函数 2、继承与友元 …

TL3568编译Kernel内核 make tl3568-evm.img -j16报错 ‘arch/arm64/boot/Image.lz4‘ failed

在编译Kernel时,遇到报错内容: /bin/sh: lz4c: command not found arch/arm64/boot/Makefile:31: recipe for target arch/arm64/boot/Image.lz4 failed make[1]: *** [arch/arm64/boot/Image.lz4] Error 1 arch/arm64/Makefile:139: recipe for target …

科普文:【支持信创、宣传国产】Alibaba Dragonwell JVM性能提升50%

4月5日,阿里云开放了新一代ECS实例的邀测[1],Alibaba Dragonwell也在新ECS上进行了极致的优化。相比于之前的dragonwell_11.0.8.3版本,即将发布的dragonwell_11.0.11.6在SPECjbb2015[2] composite模式测试中,系统吞吐量max-jOPS提…

【算法】插值查找(对二分查找的优化)

引言 在二分查找中,对于相对较大的数或较小的数来说,查询效率是很低的,我们希望程序可以自适应待查询的数,使用插值算法 插值查找原理 1.插值查找算法类似于二分查找,不同的是插值查找每次从自适应 mid 处开始查找 2…

Airtest封装的Tidevice接口有多好用(二)

一、前言 上节课我们分享了一批Airtest封装的Tidevice接口,是有关获取设备信息的,还没看到的同学可以戳这里复习一下。那么本周我们继续来看一下Airtest还封装了哪些Tidevice的接口吧~ 二、Airtest封装的Tidevice接口 2.1 list_app(udid ,app_typeuse…

早得农元早享“富”!农元又双叒叕涨了!

农元升值设定的唯一途径,仅随着用户在平台每次的购物而升值,未来农元的价值升值甚至会达到几千、上万元人民币,真正实现了购物乐趣与财富增长的双重盛宴,让每一位平台用户都能享受到数字经济时代带来的红利。 快从消费者变为经营者…

警惕!六西格玛培训中不可不知的六大陷阱

近年来,随着六西格玛的普及,一些常见的培训陷阱也逐渐浮出水面,让不少求学者误入歧途。本文,深圳天行健企业管理咨询公司旨在为大家揭示六西格玛培训中的六大常见陷阱,真正掌握六西格玛的精髓。 陷阱一:速成…

KMP入门与算法题实践

基础知识 参考视频 下面是两个b站上个人借鉴学习的视频 第一个视频用来快速理解KMP: 【最浅显易懂的 KMP 算法讲解】 https://www.bilibili.com/video/BV1AY4y157yL/?share_sourcecopy_web&vd_sourced124eda224bf54d0e3ab795c0b89dbb0 第二、三个视频用来理…

vue3学习day01-vue3的优势、新的脚手架工具create-vue、创建vue3项目、vue3的项目文件内容、插件变化

1、vue3的优势 (1)更易维护:组合式api,更好的TypeScript支持 (2)更快的速度:重写diff算法,模版编译优化,更高效的组件化 (3)更小的体积&#x…

MES系统:生产实时监控与智能反馈,驱动制造业智能化升级

MES系统(Manufacturing Execution System,制造执行系统)通过集成多种技术手段和管理模块,实现了生产过程的实时监控与反馈。以下是实时监控与反馈具体实现的详细分析: 一、实时监控 1. 数据采集 传感器与设备集成&am…

nrm: npm 镜像源管理工具

nrm 是 “npm registry manager” 的缩写,是一个 npm 镜像源管理工具,用于在不同的 npm 镜像源之间快速切换,帮助开发者根据需要选择不同的源来加速包的下载或解决网络问题。 常用命令 详细介绍 以下是 nrm 的一些主要特性和用法&#xff1…

精美UI三方用户中心 新版QRuser用户中心主题 | 魔方财务模板

内容目录 一、详细介绍二、效果展示1.部分代码2.效果图展示 三、学习资料下载 一、详细介绍 新版QRuser用户中心主题 | 魔方财务模板 本主题支持魔方财务3.5.7版本!可自由切换魔方财务3.5.7版本与其他版本。 本主题基于官方default开发,主要面向企业…

Java语言程序设计——篇十一(1)

🌿🌿🌿跟随博主脚步,从这里开始→博主主页🌿🌿🌿 欢迎大家:这里是CSDN,我的学习笔记、总结知识的地方,喜欢的话请三连,有问题可以私信…