浏览代码

初始化仓库

Jeston 1 年之前
父节点
当前提交
d1360c0e44
共有 39 个文件被更改，包括 14838 次插入和 0 次删除
  1. 123 0
      CMakeLists.txt
  2. 539 0
      NebulaSDK/AArch64/include/VzenseNebula_api.h
  3. 28 0
      NebulaSDK/AArch64/include/VzenseNebula_define.h
  4. 109 0
      NebulaSDK/AArch64/include/VzenseNebula_enums.h
  5. 206 0
      NebulaSDK/AArch64/include/VzenseNebula_types.h
  6. 13 0
      NebulaSDK/AArch64/lib/Config/DS77CLite_0F.json
  7. 13 0
      NebulaSDK/AArch64/lib/Config/DS77CPro_0E.json
  8. 8 0
      NebulaSDK/AArch64/lib/Config/DS77Lite_11.json
  9. 8 0
      NebulaSDK/AArch64/lib/Config/DS77Pro_10.json
  10. 13 0
      NebulaSDK/AArch64/lib/Config/DS86_12.json
  11. 13 0
      NebulaSDK/AArch64/lib/Config/DS87_13.json
  12. 2930 0
      communication/communication.pb.cc
  13. 2196 0
      communication/communication.pb.h
  14. 76 0
      communication/communication.proto
  15. 214 0
      communication/protocol_conversion.hpp
  16. 41 0
      communication/rabbitmq_communication.cpp
  17. 49 0
      communication/rabbitmq_communication.h
  18. 142 0
      detect/common.hpp
  19. 658 0
      detect/stream/def.pb.cc
  20. 393 0
      detect/stream/def.pb.h
  21. 17 0
      detect/stream/def.proto
  22. 1 0
      detect/stream/proto.sh
  23. 69 0
      detect/wheel-detector.cpp
  24. 25 0
      detect/wheel-detector.h
  25. 319 0
      detect/yolov8-seg.cpp
  26. 53 0
      detect/yolov8-seg.h
  27. 7 0
      main.cpp
  28. 183 0
      model/inference.cpp
  29. 50 0
      model/inference.h
  30. 29 0
      tof3DMain.cpp
  31. 2888 0
      tof3d_config.pb.cc
  32. 2105 0
      tof3d_config.pb.h
  33. 61 0
      tof3d_config.proto
  34. 23 0
      tof3d_manager.cpp
  35. 19 0
      tof3d_manager.h
  36. 12 0
      tof3d_manager.prototxt
  37. 351 0
      velodyne_manager.prototxt
  38. 721 0
      vzense/device_tof3d.cpp
  39. 133 0
      vzense/device_tof3d.h

+ 123 - 0
CMakeLists.txt

@@ -0,0 +1,123 @@
# ---- tof3d sub-project ------------------------------------------------------
# This file is added from a parent build (add_subdirectory): it relies on
# options/targets defined there (ENABLE_LIBRARY_GOOGLE_LOG, liblog, libthread,
# librabbitmq, libmessage, PROTOBUF_*, PCL_INCLUDE_DIRS), which is why there is
# no cmake_minimum_required() here.
set(SON_PROJECT_NAME tof3d)

set(CMAKE_CUDA_ARCHITECTURES 60 61 62 70 72 75 86)
set(CMAKE_CUDA_COMPILER /usr/local/cuda/bin/nvcc)
project(${SON_PROJECT_NAME} LANGUAGES CXX CUDA)

# C++17 via the standard variables only.  The original also appended
# "-std=c++17" to CMAKE_CXX_FLAGS, which duplicates (and can conflict with)
# CMAKE_CXX_STANDARD; -O3 is applied per-target below instead.
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# option() signature is (name, help-string, default).  The original
# omitted the help string, so "OFF" was parsed as the description.
option(CUDA_USE_STATIC_CUDA_RUNTIME "Link against the static CUDA runtime" OFF)

# CUDA.  FindCUDA is deprecated (find_package(CUDAToolkit) is the modern
# replacement) but is kept for compatibility with the parent project's
# minimum CMake version.
find_package(CUDA REQUIRED)
message(STATUS "CUDA Libs: \n${CUDA_LIBRARIES}\n")
message(STATUS "CUDA Headers: \n${CUDA_INCLUDE_DIRS}\n")
# NOTE(review): the original derived CUDA_LIB_DIR with
# get_filename_component(... ${CUDA_LIBRARIES} DIRECTORY) — an unquoted list
# expansion — and then *linked* that directory.  Linking a directory is a
# no-op at best and a linker error at worst, so it is dropped here; the
# libraries themselves (${CUDA_LIBRARIES}) are linked below.

# OpenCV
find_package(OpenCV REQUIRED)
message(STATUS "OpenCV Libs: \n${OpenCV_LIBS}\n")
message(STATUS "OpenCV Libraries: \n${OpenCV_LIBRARIES}\n")
message(STATUS "OpenCV Headers: \n${OpenCV_INCLUDE_DIRS}\n")

# TensorRT.  There is no imported target on this platform; the headers live
# in the multiarch include dir and the libraries (nvinfer, nvinfer_plugin)
# are on the default linker path, so they are linked by name below.  The
# original put the *library directory* into the link list — that was a bug.
set(TensorRT_INCLUDE_DIRS /usr/include/aarch64-linux-gnu)
set(TensorRT_LIB_DIR /usr/lib/aarch64-linux-gnu)
message(STATUS "TensorRT Lib dir: \n${TensorRT_LIB_DIR}\n")
message(STATUS "TensorRT Headers: \n${TensorRT_INCLUDE_DIRS}\n")

message("========== Load son project ${SON_PROJECT_NAME} ==========")
option(OPTION_COMMUNICATION_WITH_PLC "Enable PLC communication" OFF)
message("<=${SON_PROJECT_NAME}=> OPTION_COMMUNICATION_WITH_PLC: ${OPTION_COMMUNICATION_WITH_PLC}")

# Required parent-project feature toggles.
if(NOT ENABLE_LIBRARY_GOOGLE_LOG)
    message(FATAL_ERROR "ENABLE_LIBRARY_GOOGLE_LOG OFF")
endif()

# Vzense NebulaSDK: only the AArch64 build of the SDK ships with this tree,
# so fail loudly on any other architecture instead of leaving NebulaSDK_PATH
# empty and producing an obscure link error later.
message("SYSTEM: ${CMAKE_SYSTEM_PROCESSOR}")
if(CMAKE_SYSTEM_PROCESSOR STREQUAL "aarch64")
    set(NebulaSDK_PATH "${CMAKE_CURRENT_LIST_DIR}/NebulaSDK/AArch64")
else()
    message(FATAL_ERROR
        "Unsupported architecture '${CMAKE_SYSTEM_PROCESSOR}': "
        "NebulaSDK is only provided for aarch64")
endif()
message("NebulaSDK_PATH: ${NebulaSDK_PATH}")

# Explicit source list (the original used aux_source_directory, which is a
# glob: it silently misses newly added files until the next re-configure).
set(SON_PROJECT_SOURCE_LIST ${SON_PROJECT_SOURCE_LIST}
        ${CMAKE_CURRENT_LIST_DIR}/communication/communication.pb.cc
        ${CMAKE_CURRENT_LIST_DIR}/communication/rabbitmq_communication.cpp
        ${CMAKE_CURRENT_LIST_DIR}/vzense/device_tof3d.cpp
        ${CMAKE_CURRENT_LIST_DIR}/detect/wheel-detector.cpp
        ${CMAKE_CURRENT_LIST_DIR}/detect/yolov8-seg.cpp
        ${CMAKE_CURRENT_LIST_DIR}/tof3d_manager.h
        ${CMAKE_CURRENT_LIST_DIR}/tof3d_manager.cpp
        ${CMAKE_CURRENT_LIST_DIR}/tof3d_config.pb.h
        ${CMAKE_CURRENT_LIST_DIR}/tof3d_config.pb.cc
        ${CMAKE_CURRENT_LIST_DIR}/tof3DMain.cpp
)

add_executable(${SON_PROJECT_NAME} ${SON_PROJECT_SOURCE_LIST})

# Target-scoped includes instead of directory-scoped include_directories().
target_include_directories(${SON_PROJECT_NAME} PRIVATE
        ${CUDA_INCLUDE_DIRS}
        ${OpenCV_INCLUDE_DIRS}
        ${TensorRT_INCLUDE_DIRS}
        ${CMAKE_CURRENT_LIST_DIR}/include
        ${CMAKE_CURRENT_LIST_DIR}
        ${PCL_INCLUDE_DIRS}
        ${PROTOBUF_INCLUDE_DIR}
        ${NebulaSDK_PATH}/include
)

# Explicit PRIVATE keyword: the keyword-less target_link_libraries form has
# legacy semantics and must not be mixed with keyworded calls.
target_link_libraries(${SON_PROJECT_NAME} PRIVATE
        liblog
        libthread
        librabbitmq
        libmessage
        ${NebulaSDK_PATH}/lib/libNebula_api.so
        ${PROTOBUF_LIBRARIES}
        ${OpenCV_LIBRARIES}
        nvinfer
        nvinfer_plugin
        ${CUDA_LIBRARIES}
)

target_compile_options(${SON_PROJECT_NAME} PRIVATE
        $<$<COMPILE_LANGUAGE:CXX>:-O3>
)
target_compile_definitions(${SON_PROJECT_NAME} PRIVATE
        PROJECT_NAME="${SON_PROJECT_NAME}"
)

# OpenCV >= 4.7 ships batched NMS support; gate the define per-target
# instead of the original directory-scoped add_definitions().
if(OpenCV_VERSION VERSION_GREATER_EQUAL 4.7.0)
    message(STATUS "Build with -DBATCHED_NMS")
    target_compile_definitions(${SON_PROJECT_NAME} PRIVATE BATCHED_NMS)
endif()

install(TARGETS ${SON_PROJECT_NAME}
        LIBRARY DESTINATION lib          # shared libraries
        ARCHIVE DESTINATION lib          # static libraries
        RUNTIME DESTINATION bin          # executables
        PUBLIC_HEADER DESTINATION include  # public headers
)

+ 539 - 0
NebulaSDK/AArch64/include/VzenseNebula_api.h

@@ -0,0 +1,539 @@
+#ifndef VZENSEDS_API_H
+#define VZENSEDS_API_H
+
+/**
+* @file VzenseDS_api.h
+* @brief Vzense API header file.
+* Copyright (c) 2019-2022 Vzense Interactive, Inc.
+*/
+
+/*! \mainpage VzenseDS API Documentation
+*
+* \section intro_sec Introduction
+*
+* Welcome to the VzenseDS API documentation. This documentation enables you to quickly get started in your development efforts to programmatically interact with the Vzense CW ToF Camera (eg:DS77).
+*/
+
+#include "VzenseNebula_define.h"
+
+/**
+* @brief         Initializes the API on the device. This function must be invoked before any other Vzense APIs.
+* @return        ::VzRetOK if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_Initialize();
+
+/**
+* @brief         Shuts down the API on the device and clears all resources allocated by the API. After invoking this function, no other Vzense APIs can be invoked.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_Shutdown();
+
+/**
+* @brief         Gets the version of SDK.
+* @return        Returns sdk version
+*/
+VZENSE_C_API_EXPORT const char* VZ_GetSDKVersion();
+
+/**
+* @brief         Returns the number of camera devices currently connected.
+* @param[out]    pDeviceCount    Pointer to a 32-bit integer variable in which to return the device count.
+* @return        ::VzRetOK       if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetDeviceCount(uint32_t* pDeviceCount);
+
+/**
+* @brief         Returns the info of the deviceIndex camera device.
+* @param[in]     deviceIndex    The index of the device to open. Device indices range from 0 to device count - 1.
+* @param[out]    pDevicesInfo       Pointer to a buffer in which to store the device info.
+* @return        ::VzRetOK      if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetDeviceInfo(uint32_t deviceIndex, VzDeviceInfo* pDevicesInfo);
+
+/**
+* @brief         Returns the info lists of the deviceCount camera devices.
+* @param[in]     deviceCount         the number of camera devices.
+* @param[out]    pDevicesInfoList    Pointer to a buffer in which to store the devices list infos.
+* @return        ::VzRetOK           if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetDeviceInfoList(uint32_t deviceCount, VzDeviceInfo* pDevicesInfoList);
+
+/**
+* @brief         Opens the device specified by <code>sn</code>. The device must be subsequently closed using VZ_CloseDevice().
+* @param[in]     pURI         the uri of the device. See ::VzDeviceInfo for more information.
+* @param[out]    pDevice     the handle of the device on which to open.
+* @return:       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_OpenDeviceByUri(const char* pURI, VzDeviceHandle* pDevice);
+
+/**
+* @brief         Opens the device specified by <code>alias</code>. The device must be subsequently closed using VZ_CloseDevice().
+* @param[in]     pAlias       the alias of the device. See ::VzDeviceInfo for more information.
+* @param[out]    pDevice     the handle of the device on which to open.
+* @return:       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_OpenDeviceByAlias(const char* pAlias, VzDeviceHandle* pDevice);
+
+/**
+* @brief         Opens the device specified by <code>ip</code>. The device must be subsequently closed using VZ_CloseDevice().
+* @param[in]     pIP          the ip of the device. See ::VzDeviceInfo for more information.
+* @param[out]    pDevice     the handle of the device on which to open.
+* @return:       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_OpenDeviceByIP(const char* pIP, VzDeviceHandle* pDevice);
+
+/**
+* @brief         Closes the device specified by <code>device</code> that was opened using VZ_OpenDevice.
+* @param[in]     pDevice       The handle of the device to close.
+* @return:       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_CloseDevice(VzDeviceHandle* pDevice);
+
+/**
+* @brief         Starts capturing the image stream indicated by <code>device</code>. Invoke VZ_StopStream() to stop capturing the image stream.
+* @param[in]     device          The handle of the device on which to start capturing the image stream.                        
+* @return        ::VzRetOK if    the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_StartStream(VzDeviceHandle device);
+
+/**
+* @brief         Stops capturing the image stream on the device specified by <code>device</code>. that was started using VZ_StartStream.
+* @param[in]     device       The handle of the device on which to stop capturing the image stream.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_StopStream(VzDeviceHandle device);
+
+/**
+* @brief         Captures the next image frame from the device specified by <code>device</code>. This API must be invoked before capturing frame data using VZ_GetFrame().
+* @param[in]     device         The handle of the device on which to read the next frame.
+* @param[in]     waitTime       The unit is millisecond, the value is in the range (0,65535).
+*                               You can change the value according to the frame rate. For example,the frame rate is 30, so the theoretical waittime interval is 33ms, but if set the time value is 20ms,
+*                               it means the max wait time is 20 ms when capturing next frame, so when call the VZ_GetFrameReady, it may return VzRetGetFrameReadyTimeOut(-11).
+*                               So the recommended value is 2 * 1000/ FPS.
+* @param[out]    pFrameReady    Pointer to a buffer in which to store the signal on which image is ready to be get.
+* @return        ::VzRetOK      if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetFrameReady(VzDeviceHandle device, uint16_t waitTime, VzFrameReady* pFrameReady);
+
+/**
+* @brief         Returns the image data for the current frame from the device specified by <code>device</code>. Before invoking this API, invoke VZ_GetFrameReady() to capture one image frame from the device.
+* @param[in]     device       The handle of the device to capture an image frame from.
+* @param[in]     frameType    The image frame type.
+* @param[out]    pVzFrame     Pointer to a buffer in which to store the returned image data.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetFrame(VzDeviceHandle device, VzFrameType frameType, VzFrame* pVzFrame);
+
+/**
+* @brief        Set the working mode of the camera.
+* @param[in]    device      The handle of the device
+* @param[in]    mode        The work mode of camera. For ActiveMode, set the Time filter default true, for SlaveMode, set the Time filter default false.
+* @return       ::VzRetOK   if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetWorkMode(VzDeviceHandle device, VzWorkMode mode);
+
+/**
+* @brief        Get the working mode of the camera.
+* @param[in]    device      The handle of the device
+* @param[in]    pMode       The work mode of camera.
+* @return       ::VzRetOK   if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetWorkMode(VzDeviceHandle device, VzWorkMode* pMode);
+
+/**
+* @brief        Triggering a frame of image is only useful if the camera is in SoftwareTriggerMode
+* @param[in]    device      The handle of the device.
+* @return       ::VzRetOK   if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetSoftwareSlaveTrigger(VzDeviceHandle device);
+
+/**
+* @brief         Returns the internal intrinsic and distortion coefficient parameters from the device specified by <code>device</code>.
+* @param[in]     device                        The handle of the device from which to get the internal parameters. 
+* @param[in]     sensorType                    The type of sensor (depth or color) from which to get parameter information. Pass in the applicable value defined by ::VzSensorType.
+* @param[out]    pSensorIntrinsicParameters    Pointer to a VzSensorIntrinsicParameters variable in which to store the parameter values.
+* @return        ::VzRetOK                     if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetSensorIntrinsicParameters(VzDeviceHandle device, VzSensorType sensorType, VzSensorIntrinsicParameters* pSensorIntrinsicParameters);
+
+/**
+* @brief         Returns the camera rotation and translation coefficient parameters from the device specified by <code>device</code>.
+* @param[in]     device                        The handle of the device from which to get the extrinsic parameters. 
+* @param[out]    pSensorExtrinsicParameters    Pointer to a ::VzSensorExtrinsicParameters variable in which to store the parameters.
+* @return        ::VzRetOK                     if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetSensorExtrinsicParameters(VzDeviceHandle device, VzSensorExtrinsicParameters* pSensorExtrinsicParameters);
+
+/**
+* @brief         Gets the firmware version number.
+* @param[in]     device              The handle of the device on which to set the pulse count.
+* @param[in]     pFirmwareVersion    Pointer to a variable in which to store the returned fw value.
+* @param[in]     length              The maximum length is 64 bytes.
+* @return        ::VzRetOK          if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetFirmwareVersion(VzDeviceHandle device, char* pFirmwareVersion, int length);
+
+/**
+* @brief         Gets the MAC from the device specified by <code>device</code>.
+* @param[in]     device         The handle of the device.
+* @param[out]    pMACAddress    Pointer to a buffer in which to store the device MAC address. the buffer default size is 18, and the last buffer set '\0'.
+* @return        ::VzRetOK      if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetDeviceMACAddress(VzDeviceHandle device, char* pMACAddress);
+
+/**
+* @brief        Sets the device GMM gain on a device.
+* @param[in]    device       The handle of the device on which to set the GMM gain.
+* @param[in]    gmmgain      The GMM gain value to set. See ::VzGMMGain for more information.The GMM gain value is in the range [0,255].
+* @return       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetIRGMMGain(VzDeviceHandle device, uint8_t gmmgain);
+
+/**
+* @brief        Returns the device's GMM gain.
+* @param[in]    device       The handle of the device from which to get the GMM gain.
+* @param[out]   pGmmgain      Pointer to a variable in which to store the returned GMM gain.
+* @return       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetIRGMMGain(VzDeviceHandle device, uint8_t* pGmmgain);
+
+/**
+* @brief         Sets the color image pixel format on the device specified by <code>device</code>. Currently only RGB and BGR formats are supported.
+* @param[in]     device         The handle of the device to set the pixel format. 
+* @param[in]     pixelFormat    The color pixel format to use. Pass in one of the values defined by ::VzPixelFormat. Currently only <code>VzPixelFormatRGB888</code> and <code>VzPixelFormatBGR888</code> are supported.
+* @return        ::VzRetOK      if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetColorPixelFormat(VzDeviceHandle device, VzPixelFormat pixelFormat);
+
+/**
+* @brief        Sets the color frame Resolution.
+* @param[in]    device       The handle of the device.
+* @param[in]    w            The width of color image
+* @param[in]    h            The height of color image
+* @return       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetColorResolution(VzDeviceHandle device, int w, int h);
+
+/**
+* @brief        Returns the color frame resolution.
+* @param[in]    device         The handle of the device.
+* @param[out]    pW            The width of color image
+* @param[out]    pH            The height of color image
+* @return       ::VzRetOK      if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetColorResolution(VzDeviceHandle device, int* pW, int* pH);
+
+/**
+* @brief        Gets a list of image resolutions supported by Sensor
+* @param[in]    type           The sensor type
+* @param[out]    pList         List of supported resolutions
+* @return       ::VzRetOK      if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VzGetSupportedResolutionList(VzDeviceHandle device, VzSensorType type, VzResolutionList* pList);
+
+/**
+* @brief         Sets the ToF frame rate.The interface takes a long time, about 500 ms.
+* @param[in]     device       The handle of the device on which to set the framerate.
+* @param[in]     value        The rate value, in range [1,25].
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetFrameRate(VzDeviceHandle device, int value);
+/**
+* @brief         Gets the ToF frame rate.
+* @param[in]     device       The handle of the device on which to get the framerate.
+* @param[in]     pValue       The rate value.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetFrameRate(VzDeviceHandle device, int* pValue);
+
+/**
+* @brief        Set the exposure mode of sensor.
+* @param[in]    device          The handle of the device on which to set the exposure control mode.
+* @param[in]    sensorType      The type of sensor (depth or color) from which to get parameter information. Pass in the applicable value defined by ::VzSensorType.
+* @param[in]    exposureType    the exposure control mode.
+* @return       ::VzRetOK       if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetExposureControlMode(VzDeviceHandle device, VzSensorType sensorType, VzExposureControlMode controlMode);
+
+/**
+* @brief         Get the exposure mode of sensor.
+* @param[in]     device           The handle of the device on which to get the exposure control mode.
+* @param[in]     sensorType       The type of sensor (depth or color) from which to get parameter information. Pass in the applicable value defined by ::VzSensorType.
+* @param[out]    pControlMode     the exposure control mode.
+* @return        ::VzRetOK        if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetExposureControlMode(VzDeviceHandle device, VzSensorType sensorType, VzExposureControlMode* pControlMode);
+
+/**
+* @brief        Set the exposure time of sensor.
+* @param[in]    device          The handle of the device on which to set the exposure time  in microseconds.
+* @param[in]    sensorType      The type of sensor (depth or color) from which to get parameter information. Pass in the applicable value defined by ::VzSensorType.
+* @param[in]    exposureTime    the exposure time.
+* @return       ::VzRetOK       if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetExposureTime(VzDeviceHandle device, VzSensorType sensorType, VzExposureTimeParams exposureTime);
+
+/**
+* @brief         Get the exposure time of sensor.
+* @param[in]     device           The handle of the device on which to get the exposure time in microseconds.
+* @param[in]     sensorType       The type of sensor (depth or color) from which to get parameter information. Pass in the applicable value defined by ::VzSensorType.
+* @param[out]    pExposureTime    the exposure time.
+* @return        ::VzRetOK        if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetExposureTime(VzDeviceHandle device, VzSensorType sensorType, VzExposureTimeParams* pExposureTime);
+
+/**
+* @brief        Set the parameters of the Time filter.
+* @param[in]    device       The handle of the device
+* @param[in]    params       Pointer to a variable in which to store the parameters.
+* @return       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/ 
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetTimeFilterParams(VzDeviceHandle device, VzTimeFilterParams params);
+
+/**
+* @brief         Get the parameters of the Time Filter feature.
+* @param[in]     device       The handle of the device
+* @param[out]    pParams      Pointer to a variable in which to store the returned value.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetTimeFilterParams(VzDeviceHandle device, VzTimeFilterParams *pParams);
+
+/**
+* @brief         Set the parameters of the Confidence filter.
+* @param[in]     device       The handle of the device
+* @param[out]    params       Pointer to a variable in which to store the parameters.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetConfidenceFilterParams(VzDeviceHandle device, VzConfidenceFilterParams params);
+
+/**
+* @brief         Get the parameters of the ConfidenceFilter feature.
+* @param[in]     device       The handle of the device
+* @param[out]    pParams      Pointer to a variable in which to store the returned value.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetConfidenceFilterParams(VzDeviceHandle device, VzConfidenceFilterParams *pParams);
+
+/**
+* @brief        Set the parameters of the FlyingPixel filter.
+* @param[in]    device       The handle of the device.
+* @param[out]   params       Pointer to a variable in which to store the parameters.
+* @return       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetFlyingPixelFilterParams(VzDeviceHandle device, const VzFlyingPixelFilterParams params);
+
+/**
+* @brief         Get the parameters of the Confidence filter.
+* @param[in]     device       The handle of the device
+* @param[out]    pParams      Pointer to a variable in which to store the returned value.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetFlyingPixelFilterParams(VzDeviceHandle device, VzFlyingPixelFilterParams* params);
+
+/**
+* @brief        Enables or disables the FillHole filter
+* @param[in]    device       The handle of the device.
+* @param[in]    bEnabled     Set to <code>true</code> to enable the feature or <code>false</code> to disable the feature.
+* @return       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetFillHoleFilterEnabled(VzDeviceHandle device, bool bEnabled);
+
+/**
+* @brief         Returns the Boolean value of whether the FillHole Filter feature is enabled or disabled.
+* @param[in]     device       The handle of the device
+* @param[out]    pEnabled     Pointer to a variable in which to store the returned Boolean value.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetFillHoleFilterEnabled(VzDeviceHandle device, bool *pEnabled);
+
+/**
+* @brief        Enables or disables the Spatial filter
+* @param[in]    device       The handle of the device.
+* @param[in]    bEnabled     Set to <code>true</code> to enable the feature or <code>false</code> to disable the feature.
+* @return       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetSpatialFilterEnabled(VzDeviceHandle device, bool bEnabled);
+
+/**
+* @brief         Returns the Boolean value of whether the Spatial Filter feature is enabled or disabled.
+* @param[in]     device       The handle of the device
+* @param[out]    pEnabled     Pointer to a variable in which to store the returned Boolean value.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetSpatialFilterEnabled(VzDeviceHandle device, bool *pEnabled);
+
+/**
+* @brief         Enables or disables transforms a color image into the geometry of the depth sensor. When enabled, VZ_GetFrame() can\n
+*                be invoked passing ::VzTransformedColorFrame as the frame type for get a color image which each pixel matches the \n
+*                corresponding pixel coordinates of the depth sensor. The resolution of the transformed color frame is the same as that\n
+*                of the depth image.
+* @param[in]     device       The handle of the device on which to enable or disable mapping.
+* @param[in]     bEnabled     Set to <code>true</code> to enable the feature or <code>false</code> to disable the feature.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetTransformColorImgToDepthSensorEnabled(VzDeviceHandle device, bool bEnabled);
+
+/**
+* @brief         Returns the Boolean value of whether the transformed of the color image to depth sensor space feature is enabled or disabled.
+* @param[in]     device       The handle of the device on which to enable or disable the feature.
+* @param[out]    bEnabled     Pointer to a variable in which to store the returned Boolean value.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetTransformColorImgToDepthSensorEnabled(VzDeviceHandle device, bool *bEnabled);
+
+/**
+* @brief         Enables or disables transforms the depth map into the geometry of the color sensor. When enabled, VZ_GetFrame() can\n
+*                be invoked passing ::VzTransformedDepthFrame as the frame type for get a depth image which each pixel matches the \n
+*                corresponding pixel coordinates of the color sensor. The resolution of the transformed depth frame is the same as that\n
+*                of the color image.
+* @param[in]     device       The handle of the device on which to enable or disable mapping.
+* @param[in]     bEnabled     Set to <code>true</code> to enable the feature or <code>false</code> to disable the feature.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetTransformDepthImgToColorSensorEnabled(VzDeviceHandle device, bool bEnabled);
+
+/**
+* @brief         Returns the Boolean value of whether the transformed of the depth image to color space feature is enabled or disabled.
+* @param[in]     device       The handle of the device on which to enable or disable the feature.
+* @param[out]    bEnabled     Pointer to a variable in which to store the returned Boolean value.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetTransformDepthImgToColorSensorEnabled(VzDeviceHandle device, bool *bEnabled);
+
+/**
+* @brief         Returns the point value of the frame that the mapping of the depth image to Color space.
+* @param[in]     device           The handle of the device on which to enable or disable the feature.
+* @param[in]     pointInDepth     The point in depth frame.
+* @param[in]     colorSize        The size(x = w,y = h) of color frame.
+* @param[out]    pPointInColor    The point in the color frame.
+* @return        ::VzRetOK        if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_TransformedDepthPointToColorPoint(const VzDeviceHandle device, const VzDepthVector3 depthPoint, const VzVector2u16 colorSize, VzVector2u16* pPointInColor);
+
+/**
+* @brief         Converts the input points from depth coordinate space to world coordinate space.
+* @param[in]     device          The handle of the device on which to perform the operation.
+* @param[in]     pDepthVector    Pointer to a buffer containing the x, y, and z values of the depth coordinates to be converted. \n
+*                                x and y are measured in pixels, where 0, 0 is located at the top left corner of the image. \n
+*                                z is measured in millimeters, based on the ::VzPixelFormat depth frame.
+* @param[out]    pWorldVector    Pointer to a buffer in which to output the converted x, y, and z values of the world coordinates, measured in millimeters.
+* @param[in]     pointCount      The number of points to convert.
+* @param[in]     pSensorParam    The intrinsic parameters for the depth sensor. See ::VzSensorIntrinsicParameters.
+* @return        ::VzRetOK       if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_ConvertDepthToPointCloud(VzDeviceHandle device, VzDepthVector3* pDepthVector, VzVector3f* pWorldVector, int32_t pointCount, VzSensorIntrinsicParameters* pSensorParam);
+
+/**
+* @brief         Converts the input Depth frame from depth coordinate space to world coordinate space on the device. Currently supported depth image types are VzDepthFrame and VzTransformDepthImgToColorSensorFrame.
+* @param[in]     device          The handle of the device on which to perform the operation.
+* @param[in]     pDepthFrame      The depth frame.
+* @param[out]    pWorldVector    Pointer to a buffer in which to output the converted x, y, and z values of the world coordinates, measured in millimeters. The length of pWorldVector must be (VzFrame.width * VzFrame.height).
+* @return        ::VzRetOK       if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_ConvertDepthFrameToPointCloudVector(VzDeviceHandle device, const VzFrame* pDepthFrame, VzVector3f* pWorldVector);
+/**
+* @brief        Sets hotplug status callback function
+* @param[in]    pCallback    Pointer to the callback function. See ::PtrHotPlugStatusCallback 
+* @param[in]    pUserData    Pointer to the user data. See ::PtrHotPlugStatusCallback
+* @return       ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetHotPlugStatusCallback(PtrHotPlugStatusCallback pCallback, const void* pUserData);
+
+/**
+* @brief        Reboot the camera.
+* @param[in]    device          The handle of the device
+* @return       ::VzRetOK       if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_RebootDevie(VzDeviceHandle device);
+
+/**
+* @brief        Set the corresponding property value for the device
+* @param[in]    device          The handle of the device from which to set the property value.
+* @param[in]    propertyKey     The type of property to set on the device.
+* @param[in]    pData           Pointer to a buffer containing the property value.
+* @param[in]    dataSize        The size, in bytes, of the property value contained in pData
+* @return       ::VzRetOK       if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetProperty(VzDeviceHandle device, const char* propertyKey, const void* pData, uint32_t dataSize);
+
+/**
+* @brief 		Returns a specific property value from the device
+* @param[in] 	device			The handle of the device from which to get the property value.
+* @param[in] 	propertyKey		The type of property to get from the device
+* @param[out]	pData			Pointer to a buffer to store the returned property value.
+* @param[in]	dataSize		The size, in bytes, of the property value returned in pData
+* @return 		::VzRetOK		if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetProperty(VzDeviceHandle device, const char* propertyKey, void* pData, uint32_t dataSize);
+
+/**
+* @brief         Enables or disables the HDR Mode of the ToF sensor with VzExposureControlMode_Manual. Default enabled,  so if you want switch to the VzExposureControlMode_Auto, set HDR Mode disable firstly.
+* @param[in]     device       The handle of the device on which to enable or disable the feature.
+* @param[in]     bEnabled     Set to <code>true</code> to enable the feature or <code>false</code> to disable the feature.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetHDRModeEnabled(VzDeviceHandle device, bool bEnabled);
+/**
+* @brief         Returns the Boolean value of whether the HDRMode of ToF sensor feature is enabled or disabled.
+* @param[in]     device       The handle of the device on which to enable or disable the feature.
+* @param[out]    bEnabled     Pointer to a variable in which to store the returned Boolean value.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetHDRModeEnabled(VzDeviceHandle device, bool *bEnabled);
+
+/**
+* @brief         Set the input signal parameters for Hardware Trigger.
+* @param[in]     device       The handle of the device
+* @param[in]     params       Pointer to a variable in which to store the parameters.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetInputSignalParamsForHWTrigger(VzDeviceHandle device, VzInputSignalParamsForHWTrigger params);
+
+/**
+* @brief         Get the Input signal parameters for Hardware Trigger.
+* @param[in]     device       The handle of the device
+* @param[out]    pParams      Pointer to a variable in which to store the returned value.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetInputSignalParamsForHWTrigger(VzDeviceHandle device, VzInputSignalParamsForHWTrigger *pParams);
+
+/**
+* @brief         Set the output signal parameters.
+* @param[in]     device       The handle of the device
+* @param[in]     params       Pointer to a variable in which to store the parameters.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetOutputSignalParams(VzDeviceHandle device, VzOutputSignalParams params);
+
+/**
+* @brief         Get the output signal parameters.
+* @param[in]     device       The handle of the device
+* @param[out]    pParams      Pointer to a variable in which to store the returned value.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetOutputSignalParams(VzDeviceHandle device, VzOutputSignalParams *pParams);
+
+/**
+* @brief         Set the parameters by Json file that can be saved by NebulaGUITool.
+* @param[in]     device       The handle of the device.
+* @param[in]     pfilePath    Pointer to the path of Json file.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetParamsByJson(VzDeviceHandle device, char* pfilePath);
+
+/**
+* @brief         Set the color Gain with the exposure mode of RGB sensor in VzExposureControlMode_Manual.
+* @param[in]     device       The handle of the device.
+* @param[in]     params       The value of color Gain. Value range: [1.0, 15.5]
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_SetColorGain(VzDeviceHandle device, float params);
+
+/**
+* @brief         Get the color Gain.
+* @param[in]     device       The handle of the device.
+* @param[out]    pParams      Pointer to a variable in which to store the returned value of color Gain.
+* @return        ::VzRetOK    if the function succeeded, or one of the error values defined by ::VzReturnStatus.
+*/
+VZENSE_C_API_EXPORT VzReturnStatus VZ_GetColorGain(VzDeviceHandle device, float *pParams);
+
+#endif /* VZENSEDS_API_H */

+ 28 - 0
NebulaSDK/AArch64/include/VzenseNebula_define.h

@@ -0,0 +1,28 @@
+#ifndef VZENSEDS_DEFINE_H
+#define VZENSEDS_DEFINE_H
+
+#include "VzenseNebula_enums.h"
+#include "VzenseNebula_types.h"
+
+#ifdef PS_EXPORT_ON
+    #ifdef _WIN32
+        #define VZENSE_API_EXPORT __declspec(dllexport)
+    #else
+        #define VZENSE_API_EXPORT __attribute__((visibility("default")))
+    #endif
+#else
+    #ifdef _WIN32
+        #define VZENSE_API_EXPORT __declspec(dllimport)
+    #else
+        #define VZENSE_API_EXPORT __attribute__((visibility("default")))
+    #endif
+#endif
+
+#ifdef __cplusplus
+#define VZENSE_C_API_EXPORT extern "C" VZENSE_API_EXPORT
+#else
+#define VZENSE_C_API_EXPORT VZENSE_API_EXPORT
+#define bool uint8_t
+#endif
+
+#endif /* VZENSEDS_DEFINE_H */

+ 109 - 0
NebulaSDK/AArch64/include/VzenseNebula_enums.h

@@ -0,0 +1,109 @@
+#ifndef VZENSEDS_ENUMS_H
+#define VZENSEDS_ENUMS_H
+
+/**
+ * @brief Specifies the type of image frame.
+ */
+typedef enum{
+	VzDepthFrame = 0,                           //!< Depth frame with 16 bits per pixel in millimeters.
+	VzIRFrame = 1,                              //!< IR frame with 8 bits per pixel.
+	VzColorFrame = 3,                           //!< Color frame with 24 bits per pixel in RGB/BGR format.
+    VzTransformColorImgToDepthSensorFrame = 4,  //!< Color frame with 24 bits per pixel in RGB/BGR format, that is transformed to depth sensor space where the resolution is the same as the depth frame's resolution.\n 
+	                                            //!< This frame type can be enabled using ::VZ_SetTransformColorImgToDepthSensorEnabled().
+	VzTransformDepthImgToColorSensorFrame = 5,  //!< Depth frame with 16 bits per pixel, in millimeters, that is transformed to color sensor space where the resolution is same as the color frame's resolution.\n 
+	                                            //!< This frame type can be enabled using ::VZ_SetTransformDepthImgToColorSensorEnabled().
+	VzConfidenceFrame = 8,                      //!< Confidence frame with 16 bits per pixel.
+}VzFrameType;
+
+/**
+ * @brief Specifies the image pixel format.
+ */
+typedef enum{
+	VzPixelFormatDepthMM16 = 0,        //!< Depth image pixel format, 16 bits per pixel in mm.
+	VzPixelFormatGray8 = 2,            //!< Gray image pixel format, 8 bits per pixel.
+
+	//Color
+	VzPixelFormatRGB888 = 3,           //!< Color image pixel format, 24 bits per pixel RGB format.
+	VzPixelFormatBGR888 = 4           //!< Color image pixel format, 24 bits per pixel BGR format.
+}VzPixelFormat;
+
+/**
+ * @brief Specifies the type of sensor.
+ */
+typedef enum {
+    VzToFSensor = 0x01,          //!< ToF camera.
+    VzColorSensor = 0x02         //!< Color camera.
+}VzSensorType;
+
+/**
+ * @brief Return status codes for all APIs.\n 
+ * 		  <code>VzRetOK = 0</code> means the API successfully completed its operation.\n 
+ * 		  All other codes indicate a device, parameter, or API usage error.
+ */
+typedef enum
+{
+    VzRetOK                         =  0,   //!< The function completed successfully.
+    VzRetNoDeviceConnected          = -1,   //!< There is no depth camera connected or the camera has not been connected correctly. Check the hardware connection or try unplugging and re-plugging the USB cable.
+    VzRetInvalidDeviceIndex         = -2,   //!< The input device index is invalid.
+    VzRetDevicePointerIsNull        = -3,   //!< The device structure pointer is null.
+    VzRetInvalidFrameType           = -4,   //!< The input frame type is invalid.
+    VzRetFramePointerIsNull         = -5,   //!< The output frame buffer is null.
+    VzRetNoPropertyValueGet         = -6,   //!< Cannot get the value for the specified property.
+    VzRetNoPropertyValueSet         = -7,   //!< Cannot set the value for the specified property.
+    VzRetPropertyPointerIsNull      = -8,   //!< The input property value buffer pointer is null.
+    VzRetPropertySizeNotEnough      = -9,   //!< The input property value buffer size is too small to store the specified property value.
+    VzRetInvalidDepthRange          = -10,  //!< The input depth range mode is invalid.
+    VzRetGetFrameReadyTimeOut       = -11,  //!< Capturing the next image frame timed out.
+    VzRetInputPointerIsNull         = -12,  //!< An input pointer parameter is null.
+    VzRetCameraNotOpened            = -13,  //!< The camera has not been opened.
+    vzRetInvalidCameraType          = -14,  //!< The specified type of camera is invalid.
+    VzRetInvalidParams              = -15,  //!< One or more of the parameter values provided are invalid.
+    VzRetCurrentVersionNotSupport   = -16,  //!< This feature is not supported in the current version.
+    VzRetUpgradeImgError            = -17,  //!< There is an error in the upgrade file.
+    VzRetUpgradeImgPathTooLong      = -18,  //!< Upgrade file path length greater than 260.
+	VzRetUpgradeCallbackNotSet		= -19,  //!< VZ_SetUpgradeStatusCallback is not called.
+	VzRetProductNotSupport          = -20,  //!< The current product does not support this operation.
+	VzRetNoConfigFolder				= -21,  //!< No product profile found.
+	VzRetWebServerStartError        = -22,  //!< WebServer Start/Restart error(IP or PORT).
+	VzRetGetOverStayFrame           = -23,  //!< More than 1s elapsed between the frame becoming ready and the frame being fetched.
+	VzRetCreateLogDirError          = -24,  //!< Failed to create the log directory.
+	VzRetCreateLogFileError			= -25,  //!< Failed to create the log file.
+	VzRetNoAdapterConnected			= -100,	//!< There is no adapter connected.
+	VzRetReInitialized				= -101,	//!< The SDK has already been initialized.
+	VzRetNoInitialized				= -102,	//!< The SDK has not been initialized.
+	VzRetCameraOpened				= -103,	//!< The camera has been opened.
+	VzRetCmdError					= -104,	//!< Set/Get cmd control error.
+	VzRetCmdSyncTimeOut				= -105,	//!< The command was set OK, but waiting for the synchronous reply timed out.
+    VzRetIPNotMatch                 = -106, //!< IP is not in the same network segment.
+    VzRetNotStopStream              = -107, //!< Please invoke VZ_StopStream first to close the data stream.
+    VzRetNotStartStream             = -108, //!< Please invoke VZ_StartStream first to get the data stream.
+	VzRetNoDriversFolder			= -109, //!< The Drivers folder was not found. NOTE(review): the original comment was a copy-paste of -108; meaning inferred from the enum name — confirm.
+
+	VzRetOthers = -255,	             //!< An unknown error occurred.
+}VzReturnStatus;
+
+typedef enum {
+	VzConnectUNKNOWN = 0,
+    VzUnconnected = 1,
+    VzConnected = 2,
+    VzOpened = 3,
+    VzUpgradeUnconnected = 4,
+    VzUpgradeConnected = 5,
+}VzConnectStatus;
+
+typedef enum
+{
+    VzActiveMode = 0x00,             //enter the active mode
+    VzHardwareTriggerMode = 0x01,    //enter the hardware slave mode; the hardware trigger wire must be connected to provide a hardware signal that triggers each image
+    VzSoftwareTriggerMode = 0x02,    //enter the software slave mode; invoke VZ_SetSoftwareSlaveTrigger to trigger each image
+}VzWorkMode;
+
+typedef enum
+{
+    VzExposureControlMode_Auto = 0,
+    VzExposureControlMode_Manual = 1,
+}VzExposureControlMode;
+
+
+#endif /* VZENSEDS_ENUMS_H */
+

+ 206 - 0
NebulaSDK/AArch64/include/VzenseNebula_types.h

@@ -0,0 +1,206 @@
+#ifndef VZENSEDS_TYPES_H
+#define VZENSEDS_TYPES_H
+
+#include <stdint.h>
+#include "VzenseNebula_enums.h"
+
+typedef uint16_t VzDepthPixel;  //!< Depth image pixel type in 16-bit
+typedef uint16_t VzGray16Pixel; //!< Gray image pixel type in 16-bit
+typedef uint8_t VzGray8Pixel;   //!< Gray image pixel type in 8-bit
+
+#pragma pack (push, 1)
+/**
+ * @brief Color image pixel type in 24-bit RGB format.
+ */
+typedef struct
+{
+	uint8_t r;	//!< Red
+	uint8_t g;	//!< Green
+	uint8_t b;	//!< Blue
+} VzRGB888Pixel;
+
+/**
+ * @brief Color image pixel type in 24-bit BGR format.
+ */
+typedef struct
+{
+	uint8_t b;	//!< Blue
+	uint8_t g;	//!< Green
+	uint8_t r;	//!< Red
+} VzBGR888Pixel;
+
+/**
+ * @brief Stores the x, y, and z components of a 3D vector.
+ */
+typedef struct  
+{
+	float x, y, z;	//!< The x, y, and z components of the vector.
+}VzVector3f;
+
+/**
+ * @brief Stores the x, y, and z components of a 2D vector.
+ */
+typedef struct
+{
+	uint16_t x;
+	uint16_t y;
+}VzVector2u16;
+
+/**
+ * @brief Contains depth information for a given pixel.
+ */
+typedef struct
+{
+	int          depthX;    //!< The x coordinate of the pixel.
+	int          depthY;    //!< The y coordinate of the pixel.
+	VzDepthPixel depthZ;    //!< The depth of the pixel, in millimeters.
+}VzDepthVector3;
+
+/**
+ * @brief image resolution
+ */
+typedef struct {
+    int width;
+    int height;
+}VzResolution;
+
+/**
+ * @brief Supported resolutions.
+ */
+typedef struct
+{
+    int count;
+    VzResolution resolution[6];
+}VzResolutionList;
+
+/**
+ * @brief Camera intrinsic parameters and distortion coefficients.
+ */
+typedef struct
+{
+	double	fx;  //!< Focal length x (pixel)
+	double	fy;  //!< Focal length y (pixel)
+	double	cx;  //!< Principal point x (pixel)
+	double	cy;  //!< Principal point y (pixel)
+	double	k1;  //!< Radial distortion coefficient, 1st-order
+	double	k2;  //!< Radial distortion coefficient, 2nd-order
+	double	p1;  //!< Tangential distortion coefficient
+	double	p2;  //!< Tangential distortion coefficient
+	double	k3;  //!< Radial distortion coefficient, 3rd-order
+	double	k4;  //!< Radial distortion coefficient, 4th-order
+	double	k5;  //!< Radial distortion coefficient, 5th-order
+	double	k6;  //!< Radial distortion coefficient, 6th-order
+}VzSensorIntrinsicParameters;
+
+/** 
+ * @brief Extrinsic parameters define the physical relationship (rotation and translation) from the ToF sensor to the color sensor.
+ */
+typedef struct
+{
+	double rotation[9];     //!< Orientation stored as an array of 9 doubles representing a 3x3 rotation matrix.
+	double translation[3];  //!< Location stored as an array of 3 doubles representing a 3-D translation vector.
+}VzSensorExtrinsicParameters;
+
+/**
+ * @brief Depth/IR/Color image frame data.
+ */
+typedef struct
+{
+	uint32_t       frameIndex;          //!< The index of the frame.
+	VzFrameType    frameType;           //!< The type of frame. See ::VzFrameType for more information.
+	VzPixelFormat  pixelFormat;         //!< The pixel format used by a frame. See ::VzPixelFormat for more information.
+	uint8_t*       pFrameData;          //!< A buffer containing the frame’s image data.
+	uint32_t       dataLen;             //!< The length of pFrame, in bytes.
+	float          exposureTime;        //!< The exposure time, in milliseconds.
+    uint8_t        depthRange;          //!< The depth range mode of the current frame. Used only for depth frames.
+	uint16_t       width;               //!< The width of the frame, in pixels.
+	uint16_t       height;              //!< The height of the frame, in pixels.
+    uint64_t       deviceTimestamp;     //!< The timestamp of the frame from the device.
+}VzFrame;
+
+typedef struct
+{
+	uint32_t depth : 1;
+	uint32_t ir : 1;
+	uint32_t color : 1;
+	uint32_t transformedColor : 1;
+	uint32_t transformedDepth : 1;
+	uint32_t confidence : 1;
+	uint32_t reserved : 26;
+}VzFrameReady;
+
+typedef void* VzDeviceHandle;
+
+typedef struct
+{
+	char productName[64];
+    char uri[256];
+	char alias[64];
+    char serialNumber[64];
+    char ip[17];
+	VzConnectStatus status;
+}VzDeviceInfo;
+
+typedef struct
+{
+	int threshold;//[0, 3],The larger the value is, the more obvious the filtering effect is and The smaller the point cloud wobble
+    bool enable;
+} VzTimeFilterParams;
+
+typedef struct
+{
+	int threshold;//[0, 100],The larger the value is, the more obvious the filtering effect is and the more points are filtered out
+    bool enable;
+} VzConfidenceFilterParams;
+
+typedef struct
+{
+    int	threshold;//[0, 49],The larger the value is, the more obvious the filtering effect is and the more points are filtered out
+    bool enable;
+} VzFlyingPixelFilterParams;
+
+typedef struct
+{
+    VzExposureControlMode mode;
+    int	exposureTime;              //When the control mode is AE,  exposureTime represents the maximum exposure time.
+                                   //When the control mode is Manual, exposureTime represents the current exposure time.
+} VzExposureTimeParams;
+
+
+
+/**
+ * @brief Error informations about the device
+ */
+typedef struct
+{
+    int errorCount;                     //The count of error messages, the maximum number is 10
+    char errorMessage[10][64];          //The maximum length of each error message is 64(contains '\0').
+} VzDeviceErrorInfo;
+
+typedef struct
+{	
+	uint16_t width;                      //[1,65535],The width of input signal.
+	uint16_t interval;                   //[34000,65535],The interval of input signal.
+	uint8_t polarity;                    //[0,1],0 for active low, 1 for active high.
+}VzInputSignalParamsForHWTrigger;        //Input signal parameters for Hardware Trigger.
+
+typedef struct
+{
+	uint16_t width;                      //[1,65535],The width of output signal.
+	uint16_t delay;                      //[0,65535],The delay time of output signal.
+	uint8_t polarity;                    //[0,1],0 for active low, 1 for active high.
+}VzOutputSignalParams;                   //Output signal parameters.
+
+#pragma pack (pop)
+
+/**
+* @brief hotplug status callback function
+* pInfo     return the info of the Device, See ::VzDeviceInfo
+* state     0:device added , 1:device removed
+* pUserData Pointer to user data, which can be null
+*/
+typedef void(*PtrHotPlugStatusCallback)(const VzDeviceInfo* pInfo, int state, void* pUserData);
+
+typedef void(*PtrUpgradeStatusCallback)(int status, int params, void* pUserData);
+
+#endif /* VZENSEDS_TYPES_H */

+ 13 - 0
NebulaSDK/AArch64/lib/Config/DS77CLite_0F.json

@@ -0,0 +1,13 @@
+{
+    "productName": "DS77CLite",
+    "connectType":"socket",
+    "colorSensor": [
+        {
+            "type": "gc2053",
+            "resolutionList": ["1600_1200","800_600", "640_480"]
+        }    ],
+    "toFSensor": [
+        {
+            "type": "sony_cw_2022"
+        }]
+}

+ 13 - 0
NebulaSDK/AArch64/lib/Config/DS77CPro_0E.json

@@ -0,0 +1,13 @@
+{
+    "productName": "DS77CPro",
+    "connectType":"socket",
+    "colorSensor": [
+        {
+            "type": "gc2053",
+            "resolutionList": ["1600_1200","800_600", "640_480"]
+        }    ],
+    "toFSensor": [
+        {
+            "type": "sony_cw_2022"
+        }]
+}

+ 8 - 0
NebulaSDK/AArch64/lib/Config/DS77Lite_11.json

@@ -0,0 +1,8 @@
+{
+    "productName": "DS77Lite",
+    "connectType":"socket",
+    "toFSensor": [
+        {
+            "type": "sony_cw_2022"
+        }]
+}

+ 8 - 0
NebulaSDK/AArch64/lib/Config/DS77Pro_10.json

@@ -0,0 +1,8 @@
+{
+    "productName": "DS77Pro",
+    "connectType":"socket",
+    "toFSensor": [
+        {
+            "type": "sony_cw_2022"
+        }]
+}

+ 13 - 0
NebulaSDK/AArch64/lib/Config/DS86_12.json

@@ -0,0 +1,13 @@
+{
+    "productName": "DS86",
+    "connectType":"socket",
+    "colorSensor": [
+    {
+      "type": "gc2053",
+      "resolutionList": [ "1600_1200", "800_600", "640_480" ]
+    }    ],
+    "toFSensor": [
+        {
+            "type": "sony_cw_2022"
+        }]
+}

+ 13 - 0
NebulaSDK/AArch64/lib/Config/DS87_13.json

@@ -0,0 +1,13 @@
+{
+    "productName": "DS87",
+    "connectType":"socket",
+    "colorSensor": [
+    {
+      "type": "gc2053",
+      "resolutionList": [ "1600_1200", "800_600", "640_480" ]
+    }    ],
+    "toFSensor": [
+        {
+            "type": "sony_cw_2022"
+        }]
+}

文件差异内容过多而无法显示
+ 2930 - 0
communication/communication.pb.cc


文件差异内容过多而无法显示
+ 2196 - 0
communication/communication.pb.h


+ 76 - 0
communication/communication.proto

@@ -0,0 +1,76 @@
+syntax = "proto2";
+
+enum Tof3dFunctional {
+  SearchDevice = 1;
+  ConnectDevice = 2;
+  ConnectAllEtcDevices = 3;
+  ConnectAllSearchDevices = 4;
+  DisConnectDevice = 5;
+  DisConnectAllEtcDevices = 6;
+  DisConnectAllSearchDevices = 7;
+  getDepthFrame = 8;
+  getIrFrame = 9;
+  getDepthAndIrPicture = 10;
+  getDepthPointCloud = 11;
+  DepthFrame2PointCloud = 12;
+  Frame2Mat = 13;
+}
+
+enum Tof3dVzFrameType {
+  Tof3dVzDepthFrame = 0;
+  Tof3dVzIRFrame = 1;
+}
+
+enum Tof3dVzPixelFormat {
+  Tof3dVzPixelFormatDepthMM16 = 0;
+  Tof3dVzPixelFormatGray8 = 2;
+}
+
+message Tof3dVzFrame {
+  required uint32 frameIndex = 1;
+  required Tof3dVzFrameType frameType = 2;
+  required Tof3dVzPixelFormat pixelFormat = 3;
+  repeated uint32 pFrameData = 4;
+  required uint32 dataLen = 5;
+  required float exposureTime = 6;
+  required uint32 depthRange = 7;
+  required uint32 width = 8;
+  required uint32 height = 9;
+  required uint64 deviceTimestamp = 10;
+}
+
+message Tof3dVzVector3f {
+  required int32 x = 1;
+  required int32 y = 2;
+  required int32 z = 3;
+}
+
+message Tof3dMat {
+  repeated uint32 data = 1;
+  required int32 size = 2;
+  required int32 rows = 3;
+  required int32 cols = 4;
+  required uint32 type = 5;
+  required uint64 stamp = 10;
+}
+
+message Tof3dFunctionalParams {
+  optional int64 error_code = 1;
+  optional int64 error_level = 2;
+  optional string error_string = 3;
+  optional string ip = 4;
+  optional double search_time = 5;
+  optional bool open_stream = 6;
+  optional Tof3dVzFrame depthFrame = 7;
+  optional Tof3dVzFrame irFrame = 8;
+  repeated Tof3dVzVector3f depthCloud = 9;
+  repeated Tof3dVzVector3f irCloud = 10;
+  optional Tof3dMat depthMat = 11;
+  optional Tof3dMat irMat = 12;
+}
+
+message Tof3dCommunicatonProtocol
+{
+  required Tof3dFunctional func = 1;
+  required Tof3dFunctionalParams func_params = 2;
+}

+ 214 - 0
communication/protocol_conversion.hpp

@@ -0,0 +1,214 @@
+#pragma once
+
+#include "communication.pb.h"
+#include "vzense/device_tof3d.h"
+
+class VzenseTof3dProtocolConversion {
+public:
+    static std::string ip(const Tof3dFunctionalParams &message) {
+        return message.has_ip() ? message.ip() : "";
+    }
+
+    static double searchTime(const Tof3dFunctionalParams &message) {
+        return message.has_search_time() ? message.search_time() : 0;
+    }
+
+    static bool open_stream(const Tof3dFunctionalParams &message) {
+        return message.has_open_stream() ? message.open_stream() : false;
+    }
+
+    static VzFrame depthFrame(const Tof3dFunctionalParams &message) {
+        return message.has_depthframe() ? tof3dVzFrame2VzFrame(message.depthframe()) : VzFrame();
+    }
+
+    static VzFrame tof3dVzFrame2VzFrame(const Tof3dVzFrame &tof_frame) {
+        VzFrame frame;
+        frame.frameIndex = tof_frame.frameindex();
+        frame.frameType = (VzFrameType)tof_frame.frametype();
+        frame.pixelFormat = (VzPixelFormat)tof_frame.pixelformat();
+        frame.dataLen = tof_frame.datalen();
+        frame.exposureTime = tof_frame.exposuretime();
+        frame.depthRange = tof_frame.depthrange();
+        frame.width = tof_frame.width();
+        frame.height = tof_frame.height();
+        frame.deviceTimestamp = tof_frame.devicetimestamp();
+        frame.pFrameData = (uint8_t*)malloc(tof_frame.datalen());
+
+        for (int i = 0; i < frame.dataLen; i += 4) {
+            frame.pFrameData[i] = (tof_frame.pframedata(i/4) >> 24) & 0xff;
+            frame.pFrameData[i + 1] = (tof_frame.pframedata(i/4) >> 16) & 0xff;
+            frame.pFrameData[i + 2] = (tof_frame.pframedata(i/4) >> 8) & 0xff;
+            frame.pFrameData[i + 3] = (tof_frame.pframedata(i/4) >> 0) & 0xff;
+        }
+
+        return frame;
+    }
+
+    static Tof3dVzFrame tof3dVzFrame2VzFrame(const VzFrame &frame) {
+        Tof3dVzFrame tof_frame;
+        // 图片数据转换
+        if (frame.dataLen%4 == 0) {
+            for (int i = 0; i < frame.dataLen; i += 4) {
+                tof_frame.add_pframedata(frame.pFrameData[i] << 24 |
+                                         frame.pFrameData[i + 1] << 16 |
+                                         frame.pFrameData[i + 2] << 8 |
+                                         frame.pFrameData[i + 3]);
+            }
+        } else {
+            LOG(WARNING) << "tof_frame dataLen error: " << frame.dataLen;
+            return tof_frame;
+        }
+
+        tof_frame.set_frameindex(frame.frameIndex);
+        tof_frame.set_frametype((Tof3dVzFrameType)frame.frameType);
+        tof_frame.set_pixelformat((Tof3dVzPixelFormat)frame.pixelFormat);
+        tof_frame.set_datalen(frame.dataLen);
+        tof_frame.set_exposuretime(frame.exposureTime);
+        tof_frame.set_depthrange(frame.depthRange);
+        tof_frame.set_width(frame.width);
+        tof_frame.set_height(frame.height);
+        tof_frame.set_devicetimestamp(frame.deviceTimestamp);
+        return tof_frame;
+    }
+
+    static void pclCloud2VzList(pcl::PointCloud<pcl::PointXYZ>::Ptr &cloud, std::vector<VzVector3f> &list) {
+        for (auto &point: cloud->points) {
+            VzVector3f vzp;
+            vzp.x = point.x;
+            vzp.y = point.y;
+            vzp.z = point.z;
+            list.emplace_back(vzp);
+        }
+    }
+    static void VzList2pclCloud(const std::vector<VzVector3f> &list, pcl::PointCloud<pcl::PointXYZ>::Ptr &cloud) {
+        for (auto &point: list) {
+            cloud->points.emplace_back(point.x, point.y, point.z);
+        }
+    }
+
+    static void Tof3dCommunicatonProtocolAddCloud(Tof3dCommunicatonProtocol &message, pcl::PointCloud<pcl::PointXYZ>::Ptr &cloud) {
+        for (auto &point: cloud->points) {
+            Tof3dVzVector3f vzp;
+            vzp.set_x(point.x);
+            vzp.set_y(point.y);
+            vzp.set_z(point.z);
+            message.mutable_func_params()->add_depthcloud()->CopyFrom(vzp);
+        }
+    }
+
+    static Tof3dMat CvMat2Tof3dMat(const cv::Mat &mat, uint64 stamp) {
+        Tof3dMat frame;
+        frame.set_rows(mat.rows);
+        frame.set_cols(mat.cols);
+        frame.set_type(mat.type());
+        frame.set_size(mat.rows * mat.cols * mat.channels());
+        frame.set_stamp(stamp);
+        for (int i = 0; i < frame.size(); i += 4) {
+            frame.add_data((mat.data[i + 0]  << 24) |
+                           (mat.data[i + 1]  << 16) |
+                           (mat.data[i + 2]  << 8) |
+                           (mat.data[i + 3]  << 0));
+        }
+        return frame;
+    }
+
+    static cv::Mat Tof3dMat2CvMat(const Tof3dMat &tof_mat) {
+        cv::Mat mat(tof_mat.rows(), tof_mat.cols(), tof_mat.type());
+        for (int i = 0; i < tof_mat.size(); i += 4) {
+            mat.data[i] = (tof_mat.data(i/4) >> 24) & 0xff;
+            mat.data[i + 1] = (tof_mat.data(i/4) >> 16) & 0xff;
+            mat.data[i + 2] = (tof_mat.data(i/4) >> 8) & 0xff;
+            mat.data[i + 3] = (tof_mat.data(i/4) >> 0) & 0xff;
+        }
+        return mat;
+    }
+
+    // Dispatches one Tof3dCommunicatonProtocol request to the matching DeviceTof3D
+    // API call and builds the response: echoes func and ip, attaches the requested
+    // frame/cloud/mat payload on success, and always fills the error fields.
+    static Tof3dCommunicatonProtocol runFunc(Tof3dCommunicatonProtocol &protocol) {
+        Tof3dCommunicatonProtocol protocol_resp;
+        protocol_resp.set_func(protocol.func());
+        protocol_resp.mutable_func_params()->set_ip(protocol.func_params().ip());
+        Error_manager ret;
+        VzFrame irFrame;
+        VzFrame depthFrame;
+        cv::Mat depthMat(480, 640, CV_16UC1);
+        cv::Mat irMat(480, 640, CV_8UC1);
+        pcl::PointCloud<pcl::PointXYZ>::Ptr depthCloud(new pcl::PointCloud<pcl::PointXYZ>);
+        pcl::PointCloud<pcl::PointXYZ>::Ptr irCloud(new pcl::PointCloud<pcl::PointXYZ>);
+        auto t = std::chrono::steady_clock::now();
+        std::chrono::duration<double> cost{};
+        switch (protocol.func()) {
+            case SearchDevice:
+                ret = DeviceTof3D::iter()->SearchDevice(searchTime(protocol.func_params()));
+                break;
+            case ConnectDevice:
+                ret = DeviceTof3D::iter()->ConnectDevice(ip(protocol.func_params()), open_stream(protocol.func_params()));
+                break;
+            case ConnectAllEtcDevices:
+                ret = DeviceTof3D::iter()->ConnectAllEtcDevices(open_stream(protocol.func_params()));
+                break;
+            case ConnectAllSearchDevices:
+                ret = DeviceTof3D::iter()->ConnectAllDevices(open_stream(protocol.func_params()));
+                break;
+            case DisConnectDevice:
+                ret = DeviceTof3D::iter()->DisConnectDevice(ip(protocol.func_params()));
+                break;
+            case DisConnectAllEtcDevices:
+                ret = DeviceTof3D::iter()->DisConnectAllEtcDevices();
+                break;
+            case DisConnectAllSearchDevices:
+                ret = DeviceTof3D::iter()->DisConnectAllDevices();
+                break;
+            case getDepthFrame:
+                ret = DeviceTof3D::iter()->getDepthFrame(ip(protocol.func_params()), depthFrame);
+                if (ret == SUCCESS) {
+                    protocol_resp.mutable_func_params()->mutable_depthframe()->CopyFrom(tof3dVzFrame2VzFrame(depthFrame));
+                }
+                break;
+            case getIrFrame:
+                // BUGFIX: capture the return status so the success check below is meaningful.
+                ret = DeviceTof3D::iter()->getIrFrame(ip(protocol.func_params()), irFrame);
+                if (ret == SUCCESS) {
+                    protocol_resp.mutable_func_params()->mutable_irframe()->CopyFrom(tof3dVzFrame2VzFrame(irFrame));
+                }
+                break;
+            case getDepthAndIrPicture:
+                // BUGFIX: capture the return status (was discarded, leaving ret stale).
+                ret = DeviceTof3D::iter()->getDepthAndIrPicture(ip(protocol.func_params()), depthFrame, irFrame);
+                if (ret == SUCCESS) {
+                    // BUGFIX: the depth slot previously received the IR frame by mistake.
+                    protocol_resp.mutable_func_params()->mutable_depthframe()->CopyFrom(tof3dVzFrame2VzFrame(depthFrame));
+                    protocol_resp.mutable_func_params()->mutable_irframe()->CopyFrom(tof3dVzFrame2VzFrame(irFrame));
+                }
+                break;
+            case getDepthPointCloud:
+                // NOTE(review): assumes getDepthPointCloud returns Error_manager like its siblings — confirm.
+                ret = DeviceTof3D::iter()->getDepthPointCloud(ip(protocol.func_params()), depthCloud);
+                Tof3dCommunicatonProtocolAddCloud(protocol_resp, depthCloud);
+                break;
+            case DepthFrame2PointCloud:
+                depthFrame = tof3dVzFrame2VzFrame(protocol.func_params().depthframe());
+                // NOTE(review): assumes DepthFrame2PointCloud returns Error_manager like its siblings — confirm.
+                ret = DeviceTof3D::iter()->DepthFrame2PointCloud(ip(protocol.func_params()), depthFrame, depthCloud);
+                Tof3dCommunicatonProtocolAddCloud(protocol_resp, depthCloud);
+                // BUGFIX: tof3dVzFrame2VzFrame allocates pFrameData with malloc(); release it to avoid a leak.
+                free(depthFrame.pFrameData);
+                depthFrame.pFrameData = nullptr;
+                break;
+            case Frame2Mat:
+                ret = DeviceTof3D::iter()->getDepthAndIrPicture(ip(protocol.func_params()), depthFrame, irFrame);
+                if (ret == SUCCESS) {
+                    DeviceTof3D::iter()->Frame2Mat(depthFrame, depthMat);
+                    DeviceTof3D::iter()->Frame2Mat(irFrame, irMat);
+                    protocol_resp.mutable_func_params()->mutable_depthmat()->CopyFrom(CvMat2Tof3dMat(depthMat, depthFrame.deviceTimestamp));
+                    protocol_resp.mutable_func_params()->mutable_irmat()->CopyFrom(CvMat2Tof3dMat(irMat, irFrame.deviceTimestamp));
+                }
+                break;
+            default:
+                break;
+        }
+        // Always propagate the resulting status back to the caller.
+        protocol_resp.mutable_func_params()->set_error_code(ret.get_error_code());
+        protocol_resp.mutable_func_params()->set_error_level(ret.get_error_level());
+        protocol_resp.mutable_func_params()->set_error_string(ret.get_error_description());
+        if (ret != SUCCESS) {
+            LOG(WARNING) << protocol_resp.DebugString();
+        }
+        return protocol_resp;
+    }
+private:
+
+};
+
+
+

+ 41 - 0
communication/rabbitmq_communication.cpp

@@ -0,0 +1,41 @@
+#include "rabbitmq_communication.h"
+
// Trivial lifecycle: nothing to set up or tear down beyond what the
// Singleton and Rabbitmq_base bases already manage.
RabbitmqCommunicationTof3D::RabbitmqCommunicationTof3D() = default;

RabbitmqCommunicationTof3D::~RabbitmqCommunicationTof3D() = default;
+
+
+//检查消息是否有效, 主要检查消息类型和接受者, 判断这条消息是不是给我的.
+Error_manager RabbitmqCommunicationTof3D::check_msg(Rabbitmq_message *p_msg) {
+    return {SUCCESS, NORMAL, "Don't check any message."};
+}
+
+//检查执行者的状态, 判断能否处理这条消息, 需要子类重载
+Error_manager RabbitmqCommunicationTof3D::check_executer(Rabbitmq_message *p_msg) {
+    return {SUCCESS, NORMAL, "Don't check any message."};
+}
+
+//处理消息, 需要子类重载
+Error_manager RabbitmqCommunicationTof3D::execute_msg(Rabbitmq_message *p_msg) {
+    Error_manager ret;
+    return ret;
+}
+
+//处理消息, 需要子类重载
+Error_manager RabbitmqCommunicationTof3D::execute_time_consume_msg(Rabbitmq_message *p_msg) {
+//    LOG(INFO) << "get message from " << p_msg->m_routing_key;
+    return {};
+}
+
+//定时封装发送消息, 一般为心跳和状态信息, 需要子类重载
+Error_manager RabbitmqCommunicationTof3D::auto_encapsulate_status() {
+    return {};
+}
+
+
+
+
+
+
+
+

+ 49 - 0
communication/rabbitmq_communication.h

@@ -0,0 +1,49 @@
+#pragma once
+
+#include "thread/singleton.h"
+#include "rabbitmq/rabbitmq_base.h"
+#include "protocol_conversion.hpp"
+
// Singleton RabbitMQ endpoint for the TOF-3D service: receives requests
// over RabbitMQ (Rabbitmq_base) and dispatches them to DeviceTof3D via
// the protocol-conversion helpers.
class RabbitmqCommunicationTof3D : public Singleton<RabbitmqCommunicationTof3D>, public Rabbitmq_base {
// The subclass must declare the Singleton base as a friend so the base
// template can invoke the subclass's private constructor.
    friend class Singleton<RabbitmqCommunicationTof3D>;

private:
    // The base-class constructor is protected; the subclass constructor
    // must be private so only the Singleton machinery can create it.
    RabbitmqCommunicationTof3D();

public:
    // Copy construction and copy assignment are disabled: the unique
    // instance is only reachable through get_instance().
    RabbitmqCommunicationTof3D(const RabbitmqCommunicationTof3D &other) = delete;

    RabbitmqCommunicationTof3D &operator=(const RabbitmqCommunicationTof3D &other) = delete;

    ~RabbitmqCommunicationTof3D();

public://API functions
    // Validate an incoming message (mainly type and recipient): decide
    // whether this message is addressed to us.
    virtual Error_manager check_msg(Rabbitmq_message *p_msg);

    // Check the executor's state: can this message be handled now?
    // Intended to be overridden by subclasses.
    virtual Error_manager check_executer(Rabbitmq_message *p_msg);

    // Handle a message. Intended to be overridden by subclasses.
    virtual Error_manager execute_msg(Rabbitmq_message *p_msg);

    // Handle a time-consuming message. Intended to be overridden by subclasses.
    virtual Error_manager execute_time_consume_msg(Rabbitmq_message *p_msg);

    // Periodically package and send outgoing messages, typically heartbeat
    // and status information. Intended to be overridden by subclasses.
    virtual Error_manager auto_encapsulate_status();

public://get or set member variable


protected://member variable

public:
    std::mutex m_lock;                                // guard lock

private:

};

+ 142 - 0
detect/common.hpp

@@ -0,0 +1,142 @@
+//
+// Created by ubuntu on 3/16/23.
+//
+
+#ifndef JETSON_SEGMENT_COMMON_HPP
+#define JETSON_SEGMENT_COMMON_HPP
+#include "NvInfer.h"
+#include "opencv2/opencv.hpp"
+#include <sys/stat.h>
+#include <unistd.h>
+
// Abort the process with file/line diagnostics when a CUDA runtime call
// does not return cudaSuccess. Wrap every cudaXxx() call site:
//   CHECK(cudaMalloc(&ptr, bytes));
// The do/while(0) makes the macro statement-safe inside if/else.
#define CHECK(call)                                                                                                    \
    do {                                                                                                               \
        const cudaError_t error_code = call;                                                                           \
        if (error_code != cudaSuccess) {                                                                               \
            printf("CUDA Error:\n");                                                                                   \
            printf("    File:       %s\n", __FILE__);                                                                  \
            printf("    Line:       %d\n", __LINE__);                                                                  \
            printf("    Error code: %d\n", error_code);                                                                \
            printf("    Error text: %s\n", cudaGetErrorString(error_code));                                            \
            exit(1);                                                                                                   \
        }                                                                                                              \
    } while (0)
+
+class Logger: public nvinfer1::ILogger {
+public:
+    nvinfer1::ILogger::Severity reportableSeverity;
+
+    explicit Logger(nvinfer1::ILogger::Severity severity = nvinfer1::ILogger::Severity::kINFO):
+        reportableSeverity(severity)
+    {
+    }
+
+    void log(nvinfer1::ILogger::Severity severity, const char* msg) noexcept override
+    {
+        if (severity > reportableSeverity) {
+            return;
+        }
+        switch (severity) {
+            case nvinfer1::ILogger::Severity::kINTERNAL_ERROR:
+                std::cerr << "INTERNAL_ERROR: ";
+                break;
+            case nvinfer1::ILogger::Severity::kERROR:
+                std::cerr << "ERROR: ";
+                break;
+            case nvinfer1::ILogger::Severity::kWARNING:
+                std::cerr << "WARNING: ";
+                break;
+            case nvinfer1::ILogger::Severity::kINFO:
+                std::cerr << "INFO: ";
+                break;
+            default:
+                std::cerr << "VERBOSE: ";
+                break;
+        }
+        std::cerr << msg << std::endl;
+    }
+};
+
+inline int get_size_by_dims(const nvinfer1::Dims& dims)
+{
+    int size = 1;
+    for (int i = 0; i < dims.nbDims; i++) {
+        size *= dims.d[i];
+    }
+    return size;
+}
+
+inline int type_to_size(const nvinfer1::DataType& dataType)
+{
+    switch (dataType) {
+        case nvinfer1::DataType::kFLOAT:
+            return 4;
+        case nvinfer1::DataType::kHALF:
+            return 2;
+        case nvinfer1::DataType::kINT32:
+            return 4;
+        case nvinfer1::DataType::kINT8:
+            return 1;
+        case nvinfer1::DataType::kBOOL:
+            return 1;
+        default:
+            return 4;
+    }
+}
+
// Constrain `val` to the closed interval [min, max].
// Mirrors the original ternary form exactly, including its NaN behavior:
// a NaN `val` compares false against `min` and therefore yields `min`.
inline static float clamp(float val, float min, float max)
{
    if (!(val > min)) {
        return min;
    }
    return (val < max) ? val : max;
}
+
// True if `path` exists (any file type: regular file, directory, ...).
inline bool IsPathExist(const std::string& path)
{
    // access(2) returns 0 on success. The original compared the result to
    // F_OK, which only worked because F_OK happens to be 0; the correct
    // idiom is to pass F_OK as the mode and compare the result to 0.
    return access(path.c_str(), F_OK) == 0;
}
+
+inline bool IsFile(const std::string& path)
+{
+    if (!IsPathExist(path)) {
+        printf("%s:%d %s not exist\n", __FILE__, __LINE__, path.c_str());
+        return false;
+    }
+    struct stat buffer;
+    return (stat(path.c_str(), &buffer) == 0 && S_ISREG(buffer.st_mode));
+}
+
+inline bool IsFolder(const std::string& path)
+{
+    if (!IsPathExist(path)) {
+        return false;
+    }
+    struct stat buffer;
+    return (stat(path.c_str(), &buffer) == 0 && S_ISDIR(buffer.st_mode));
+}
+
namespace seg {
// One TensorRT engine binding (input or output tensor) as tracked by the
// segmentation pipeline.
struct Binding {
    size_t         size  = 1;   // assumes total element count of the binding — TODO confirm at engine setup
    size_t         dsize = 1;   // assumes bytes per element (cf. type_to_size) — TODO confirm
    nvinfer1::Dims dims;        // tensor dimensions for this binding
    std::string    name;        // binding/tensor name
};

// One detection result: box, class, score, and its segmentation mask.
struct Object {
    cv::Rect_<float> rect;      // bounding box (float coordinates)
    int              label = 0; // class index
    float            prob  = 0.0;  // confidence score
    cv::Mat          boxMask;   // per-box segmentation mask
};

// Preprocessing (resize/letterbox) parameters — presumably used to map
// network outputs back onto the source image; verify against the caller.
struct PreParam {
    float ratio  = 1.0f;        // scale factor applied during resize
    float dw     = 0.0f;        // horizontal padding — TODO confirm units (pixels)
    float dh     = 0.0f;        // vertical padding — TODO confirm units (pixels)
    float height = 0;           // source image height
    float width  = 0;           // source image width
};
}  // namespace seg
+#endif  // JETSON_SEGMENT_COMMON_HPP

+ 658 - 0
detect/stream/def.pb.cc

@@ -0,0 +1,658 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: def.proto
+
+#include "def.pb.h"
+
+#include <algorithm>
+
+#include <google/protobuf/stubs/common.h>
+#include <google/protobuf/stubs/port.h>
+#include <google/protobuf/io/coded_stream.h>
+#include <google/protobuf/wire_format_lite_inl.h>
+#include <google/protobuf/descriptor.h>
+#include <google/protobuf/generated_message_reflection.h>
+#include <google/protobuf/reflection_ops.h>
+#include <google/protobuf/wire_format.h>
+// This is a temporary google only hack
+#ifdef GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS
+#include "third_party/protobuf/version.h"
+#endif
+// @@protoc_insertion_point(includes)
+
+namespace JetStream {
+class RequestCmdDefaultTypeInternal {
+ public:
+  ::google::protobuf::internal::ExplicitlyConstructed<RequestCmd>
+      _instance;
+} _RequestCmd_default_instance_;
+class ResponseDefaultTypeInternal {
+ public:
+  ::google::protobuf::internal::ExplicitlyConstructed<Response>
+      _instance;
+} _Response_default_instance_;
+}  // namespace JetStream
+namespace protobuf_def_2eproto {
+static void InitDefaultsRequestCmd() {
+  GOOGLE_PROTOBUF_VERIFY_VERSION;
+
+  {
+    void* ptr = &::JetStream::_RequestCmd_default_instance_;
+    new (ptr) ::JetStream::RequestCmd();
+    ::google::protobuf::internal::OnShutdownDestroyMessage(ptr);
+  }
+  ::JetStream::RequestCmd::InitAsDefaultInstance();
+}
+
+::google::protobuf::internal::SCCInfo<0> scc_info_RequestCmd =
+    {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsRequestCmd}, {}};
+
+static void InitDefaultsResponse() {
+  GOOGLE_PROTOBUF_VERIFY_VERSION;
+
+  {
+    void* ptr = &::JetStream::_Response_default_instance_;
+    new (ptr) ::JetStream::Response();
+    ::google::protobuf::internal::OnShutdownDestroyMessage(ptr);
+  }
+  ::JetStream::Response::InitAsDefaultInstance();
+}
+
+::google::protobuf::internal::SCCInfo<0> scc_info_Response =
+    {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsResponse}, {}};
+
+void InitDefaults() {
+  ::google::protobuf::internal::InitSCC(&scc_info_RequestCmd.base);
+  ::google::protobuf::internal::InitSCC(&scc_info_Response.base);
+}
+
+::google::protobuf::Metadata file_level_metadata[2];
+
+const ::google::protobuf::uint32 TableStruct::offsets[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = {
+  ~0u,  // no _has_bits_
+  GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::JetStream::RequestCmd, _internal_metadata_),
+  ~0u,  // no _extensions_
+  ~0u,  // no _oneof_case_
+  ~0u,  // no _weak_field_map_
+  GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::JetStream::RequestCmd, action_),
+  ~0u,  // no _has_bits_
+  GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::JetStream::Response, _internal_metadata_),
+  ~0u,  // no _extensions_
+  ~0u,  // no _oneof_case_
+  ~0u,  // no _weak_field_map_
+  GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::JetStream::Response, ret_),
+  GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::JetStream::Response, info_),
+};
+static const ::google::protobuf::internal::MigrationSchema schemas[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = {
+  { 0, -1, sizeof(::JetStream::RequestCmd)},
+  { 6, -1, sizeof(::JetStream::Response)},
+};
+
+static ::google::protobuf::Message const * const file_default_instances[] = {
+  reinterpret_cast<const ::google::protobuf::Message*>(&::JetStream::_RequestCmd_default_instance_),
+  reinterpret_cast<const ::google::protobuf::Message*>(&::JetStream::_Response_default_instance_),
+};
+
+void protobuf_AssignDescriptors() {
+  AddDescriptors();
+  AssignDescriptors(
+      "def.proto", schemas, file_default_instances, TableStruct::offsets,
+      file_level_metadata, NULL, NULL);
+}
+
+void protobuf_AssignDescriptorsOnce() {
+  static ::google::protobuf::internal::once_flag once;
+  ::google::protobuf::internal::call_once(once, protobuf_AssignDescriptors);
+}
+
+void protobuf_RegisterTypes(const ::std::string&) GOOGLE_PROTOBUF_ATTRIBUTE_COLD;
+void protobuf_RegisterTypes(const ::std::string&) {
+  protobuf_AssignDescriptorsOnce();
+  ::google::protobuf::internal::RegisterAllTypes(file_level_metadata, 2);
+}
+
+void AddDescriptorsImpl() {
+  InitDefaults();
+  static const char descriptor[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = {
+      "\n\tdef.proto\022\tJetStream\"\034\n\nRequestCmd\022\016\n\006"
+      "action\030\001 \001(\005\"%\n\010Response\022\013\n\003ret\030\001 \001(\005\022\014\n"
+      "\004info\030\002 \001(\t2L\n\nNavExcutor\022>\n\014DetectStrea"
+      "m\022\025.JetStream.RequestCmd\032\023.JetStream.Res"
+      "ponse\"\0000\001b\006proto3"
+  };
+  ::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
+      descriptor, 177);
+  ::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
+    "def.proto", &protobuf_RegisterTypes);
+}
+
+void AddDescriptors() {
+  static ::google::protobuf::internal::once_flag once;
+  ::google::protobuf::internal::call_once(once, AddDescriptorsImpl);
+}
+// Force AddDescriptors() to be called at dynamic initialization time.
+struct StaticDescriptorInitializer {
+  StaticDescriptorInitializer() {
+    AddDescriptors();
+  }
+} static_descriptor_initializer;
+}  // namespace protobuf_def_2eproto
+namespace JetStream {
+
+// ===================================================================
+
+void RequestCmd::InitAsDefaultInstance() {
+}
+#if !defined(_MSC_VER) || _MSC_VER >= 1900
+const int RequestCmd::kActionFieldNumber;
+#endif  // !defined(_MSC_VER) || _MSC_VER >= 1900
+
+RequestCmd::RequestCmd()
+  : ::google::protobuf::Message(), _internal_metadata_(NULL) {
+  ::google::protobuf::internal::InitSCC(
+      &protobuf_def_2eproto::scc_info_RequestCmd.base);
+  SharedCtor();
+  // @@protoc_insertion_point(constructor:JetStream.RequestCmd)
+}
+RequestCmd::RequestCmd(const RequestCmd& from)
+  : ::google::protobuf::Message(),
+      _internal_metadata_(NULL) {
+  _internal_metadata_.MergeFrom(from._internal_metadata_);
+  action_ = from.action_;
+  // @@protoc_insertion_point(copy_constructor:JetStream.RequestCmd)
+}
+
+void RequestCmd::SharedCtor() {
+  action_ = 0;
+}
+
+RequestCmd::~RequestCmd() {
+  // @@protoc_insertion_point(destructor:JetStream.RequestCmd)
+  SharedDtor();
+}
+
+void RequestCmd::SharedDtor() {
+}
+
+void RequestCmd::SetCachedSize(int size) const {
+  _cached_size_.Set(size);
+}
+const ::google::protobuf::Descriptor* RequestCmd::descriptor() {
+  ::protobuf_def_2eproto::protobuf_AssignDescriptorsOnce();
+  return ::protobuf_def_2eproto::file_level_metadata[kIndexInFileMessages].descriptor;
+}
+
+const RequestCmd& RequestCmd::default_instance() {
+  ::google::protobuf::internal::InitSCC(&protobuf_def_2eproto::scc_info_RequestCmd.base);
+  return *internal_default_instance();
+}
+
+
+void RequestCmd::Clear() {
+// @@protoc_insertion_point(message_clear_start:JetStream.RequestCmd)
+  ::google::protobuf::uint32 cached_has_bits = 0;
+  // Prevent compiler warnings about cached_has_bits being unused
+  (void) cached_has_bits;
+
+  action_ = 0;
+  _internal_metadata_.Clear();
+}
+
+bool RequestCmd::MergePartialFromCodedStream(
+    ::google::protobuf::io::CodedInputStream* input) {
+#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
+  ::google::protobuf::uint32 tag;
+  // @@protoc_insertion_point(parse_start:JetStream.RequestCmd)
+  for (;;) {
+    ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
+    tag = p.first;
+    if (!p.second) goto handle_unusual;
+    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
+      // int32 action = 1;
+      case 1: {
+        if (static_cast< ::google::protobuf::uint8>(tag) ==
+            static_cast< ::google::protobuf::uint8>(8u /* 8 & 0xFF */)) {
+
+          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
+                   ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
+                 input, &action_)));
+        } else {
+          goto handle_unusual;
+        }
+        break;
+      }
+
+      default: {
+      handle_unusual:
+        if (tag == 0) {
+          goto success;
+        }
+        DO_(::google::protobuf::internal::WireFormat::SkipField(
+              input, tag, _internal_metadata_.mutable_unknown_fields()));
+        break;
+      }
+    }
+  }
+success:
+  // @@protoc_insertion_point(parse_success:JetStream.RequestCmd)
+  return true;
+failure:
+  // @@protoc_insertion_point(parse_failure:JetStream.RequestCmd)
+  return false;
+#undef DO_
+}
+
+void RequestCmd::SerializeWithCachedSizes(
+    ::google::protobuf::io::CodedOutputStream* output) const {
+  // @@protoc_insertion_point(serialize_start:JetStream.RequestCmd)
+  ::google::protobuf::uint32 cached_has_bits = 0;
+  (void) cached_has_bits;
+
+  // int32 action = 1;
+  if (this->action() != 0) {
+    ::google::protobuf::internal::WireFormatLite::WriteInt32(1, this->action(), output);
+  }
+
+  if ((_internal_metadata_.have_unknown_fields() &&  ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
+    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
+        (::google::protobuf::internal::GetProto3PreserveUnknownsDefault()   ? _internal_metadata_.unknown_fields()   : _internal_metadata_.default_instance()), output);
+  }
+  // @@protoc_insertion_point(serialize_end:JetStream.RequestCmd)
+}
+
+::google::protobuf::uint8* RequestCmd::InternalSerializeWithCachedSizesToArray(
+    bool deterministic, ::google::protobuf::uint8* target) const {
+  (void)deterministic; // Unused
+  // @@protoc_insertion_point(serialize_to_array_start:JetStream.RequestCmd)
+  ::google::protobuf::uint32 cached_has_bits = 0;
+  (void) cached_has_bits;
+
+  // int32 action = 1;
+  if (this->action() != 0) {
+    target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(1, this->action(), target);
+  }
+
+  if ((_internal_metadata_.have_unknown_fields() &&  ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
+    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
+        (::google::protobuf::internal::GetProto3PreserveUnknownsDefault()   ? _internal_metadata_.unknown_fields()   : _internal_metadata_.default_instance()), target);
+  }
+  // @@protoc_insertion_point(serialize_to_array_end:JetStream.RequestCmd)
+  return target;
+}
+
+size_t RequestCmd::ByteSizeLong() const {
+// @@protoc_insertion_point(message_byte_size_start:JetStream.RequestCmd)
+  size_t total_size = 0;
+
+  if ((_internal_metadata_.have_unknown_fields() &&  ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
+    total_size +=
+      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
+        (::google::protobuf::internal::GetProto3PreserveUnknownsDefault()   ? _internal_metadata_.unknown_fields()   : _internal_metadata_.default_instance()));
+  }
+  // int32 action = 1;
+  if (this->action() != 0) {
+    total_size += 1 +
+      ::google::protobuf::internal::WireFormatLite::Int32Size(
+        this->action());
+  }
+
+  int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
+  SetCachedSize(cached_size);
+  return total_size;
+}
+
+void RequestCmd::MergeFrom(const ::google::protobuf::Message& from) {
+// @@protoc_insertion_point(generalized_merge_from_start:JetStream.RequestCmd)
+  GOOGLE_DCHECK_NE(&from, this);
+  const RequestCmd* source =
+      ::google::protobuf::internal::DynamicCastToGenerated<const RequestCmd>(
+          &from);
+  if (source == NULL) {
+  // @@protoc_insertion_point(generalized_merge_from_cast_fail:JetStream.RequestCmd)
+    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
+  } else {
+  // @@protoc_insertion_point(generalized_merge_from_cast_success:JetStream.RequestCmd)
+    MergeFrom(*source);
+  }
+}
+
+void RequestCmd::MergeFrom(const RequestCmd& from) {
+// @@protoc_insertion_point(class_specific_merge_from_start:JetStream.RequestCmd)
+  GOOGLE_DCHECK_NE(&from, this);
+  _internal_metadata_.MergeFrom(from._internal_metadata_);
+  ::google::protobuf::uint32 cached_has_bits = 0;
+  (void) cached_has_bits;
+
+  if (from.action() != 0) {
+    set_action(from.action());
+  }
+}
+
+void RequestCmd::CopyFrom(const ::google::protobuf::Message& from) {
+// @@protoc_insertion_point(generalized_copy_from_start:JetStream.RequestCmd)
+  if (&from == this) return;
+  Clear();
+  MergeFrom(from);
+}
+
+void RequestCmd::CopyFrom(const RequestCmd& from) {
+// @@protoc_insertion_point(class_specific_copy_from_start:JetStream.RequestCmd)
+  if (&from == this) return;
+  Clear();
+  MergeFrom(from);
+}
+
+bool RequestCmd::IsInitialized() const {
+  return true;
+}
+
+void RequestCmd::Swap(RequestCmd* other) {
+  if (other == this) return;
+  InternalSwap(other);
+}
+void RequestCmd::InternalSwap(RequestCmd* other) {
+  using std::swap;
+  swap(action_, other->action_);
+  _internal_metadata_.Swap(&other->_internal_metadata_);
+}
+
+::google::protobuf::Metadata RequestCmd::GetMetadata() const {
+  protobuf_def_2eproto::protobuf_AssignDescriptorsOnce();
+  return ::protobuf_def_2eproto::file_level_metadata[kIndexInFileMessages];
+}
+
+
+// ===================================================================
+
+void Response::InitAsDefaultInstance() {
+}
+#if !defined(_MSC_VER) || _MSC_VER >= 1900
+const int Response::kRetFieldNumber;
+const int Response::kInfoFieldNumber;
+#endif  // !defined(_MSC_VER) || _MSC_VER >= 1900
+
+Response::Response()
+  : ::google::protobuf::Message(), _internal_metadata_(NULL) {
+  ::google::protobuf::internal::InitSCC(
+      &protobuf_def_2eproto::scc_info_Response.base);
+  SharedCtor();
+  // @@protoc_insertion_point(constructor:JetStream.Response)
+}
+Response::Response(const Response& from)
+  : ::google::protobuf::Message(),
+      _internal_metadata_(NULL) {
+  _internal_metadata_.MergeFrom(from._internal_metadata_);
+  info_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
+  if (from.info().size() > 0) {
+    info_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.info_);
+  }
+  ret_ = from.ret_;
+  // @@protoc_insertion_point(copy_constructor:JetStream.Response)
+}
+
+void Response::SharedCtor() {
+  info_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
+  ret_ = 0;
+}
+
+Response::~Response() {
+  // @@protoc_insertion_point(destructor:JetStream.Response)
+  SharedDtor();
+}
+
+void Response::SharedDtor() {
+  info_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
+}
+
+void Response::SetCachedSize(int size) const {
+  _cached_size_.Set(size);
+}
+const ::google::protobuf::Descriptor* Response::descriptor() {
+  ::protobuf_def_2eproto::protobuf_AssignDescriptorsOnce();
+  return ::protobuf_def_2eproto::file_level_metadata[kIndexInFileMessages].descriptor;
+}
+
+const Response& Response::default_instance() {
+  ::google::protobuf::internal::InitSCC(&protobuf_def_2eproto::scc_info_Response.base);
+  return *internal_default_instance();
+}
+
+
+void Response::Clear() {
+// @@protoc_insertion_point(message_clear_start:JetStream.Response)
+  ::google::protobuf::uint32 cached_has_bits = 0;
+  // Prevent compiler warnings about cached_has_bits being unused
+  (void) cached_has_bits;
+
+  info_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
+  ret_ = 0;
+  _internal_metadata_.Clear();
+}
+
+bool Response::MergePartialFromCodedStream(
+    ::google::protobuf::io::CodedInputStream* input) {
+#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
+  ::google::protobuf::uint32 tag;
+  // @@protoc_insertion_point(parse_start:JetStream.Response)
+  for (;;) {
+    ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
+    tag = p.first;
+    if (!p.second) goto handle_unusual;
+    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
+      // int32 ret = 1;
+      case 1: {
+        if (static_cast< ::google::protobuf::uint8>(tag) ==
+            static_cast< ::google::protobuf::uint8>(8u /* 8 & 0xFF */)) {
+
+          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
+                   ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
+                 input, &ret_)));
+        } else {
+          goto handle_unusual;
+        }
+        break;
+      }
+
+      // string info = 2;
+      case 2: {
+        if (static_cast< ::google::protobuf::uint8>(tag) ==
+            static_cast< ::google::protobuf::uint8>(18u /* 18 & 0xFF */)) {
+          DO_(::google::protobuf::internal::WireFormatLite::ReadString(
+                input, this->mutable_info()));
+          DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
+            this->info().data(), static_cast<int>(this->info().length()),
+            ::google::protobuf::internal::WireFormatLite::PARSE,
+            "JetStream.Response.info"));
+        } else {
+          goto handle_unusual;
+        }
+        break;
+      }
+
+      default: {
+      handle_unusual:
+        if (tag == 0) {
+          goto success;
+        }
+        DO_(::google::protobuf::internal::WireFormat::SkipField(
+              input, tag, _internal_metadata_.mutable_unknown_fields()));
+        break;
+      }
+    }
+  }
+success:
+  // @@protoc_insertion_point(parse_success:JetStream.Response)
+  return true;
+failure:
+  // @@protoc_insertion_point(parse_failure:JetStream.Response)
+  return false;
+#undef DO_
+}
+
+void Response::SerializeWithCachedSizes(
+    ::google::protobuf::io::CodedOutputStream* output) const {
+  // @@protoc_insertion_point(serialize_start:JetStream.Response)
+  ::google::protobuf::uint32 cached_has_bits = 0;
+  (void) cached_has_bits;
+
+  // int32 ret = 1;
+  if (this->ret() != 0) {
+    ::google::protobuf::internal::WireFormatLite::WriteInt32(1, this->ret(), output);
+  }
+
+  // string info = 2;
+  if (this->info().size() > 0) {
+    ::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
+      this->info().data(), static_cast<int>(this->info().length()),
+      ::google::protobuf::internal::WireFormatLite::SERIALIZE,
+      "JetStream.Response.info");
+    ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
+      2, this->info(), output);
+  }
+
+  if ((_internal_metadata_.have_unknown_fields() &&  ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
+    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
+        (::google::protobuf::internal::GetProto3PreserveUnknownsDefault()   ? _internal_metadata_.unknown_fields()   : _internal_metadata_.default_instance()), output);
+  }
+  // @@protoc_insertion_point(serialize_end:JetStream.Response)
+}
+
+::google::protobuf::uint8* Response::InternalSerializeWithCachedSizesToArray(
+    bool deterministic, ::google::protobuf::uint8* target) const {
+  (void)deterministic; // Unused
+  // @@protoc_insertion_point(serialize_to_array_start:JetStream.Response)
+  ::google::protobuf::uint32 cached_has_bits = 0;
+  (void) cached_has_bits;
+
+  // int32 ret = 1;
+  if (this->ret() != 0) {
+    target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(1, this->ret(), target);
+  }
+
+  // string info = 2;
+  if (this->info().size() > 0) {
+    ::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
+      this->info().data(), static_cast<int>(this->info().length()),
+      ::google::protobuf::internal::WireFormatLite::SERIALIZE,
+      "JetStream.Response.info");
+    target =
+      ::google::protobuf::internal::WireFormatLite::WriteStringToArray(
+        2, this->info(), target);
+  }
+
+  if ((_internal_metadata_.have_unknown_fields() &&  ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
+    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
+        (::google::protobuf::internal::GetProto3PreserveUnknownsDefault()   ? _internal_metadata_.unknown_fields()   : _internal_metadata_.default_instance()), target);
+  }
+  // @@protoc_insertion_point(serialize_to_array_end:JetStream.Response)
+  return target;
+}
+
+size_t Response::ByteSizeLong() const {
+// @@protoc_insertion_point(message_byte_size_start:JetStream.Response)
+  size_t total_size = 0;
+
+  if ((_internal_metadata_.have_unknown_fields() &&  ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
+    total_size +=
+      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
+        (::google::protobuf::internal::GetProto3PreserveUnknownsDefault()   ? _internal_metadata_.unknown_fields()   : _internal_metadata_.default_instance()));
+  }
+  // string info = 2;
+  if (this->info().size() > 0) {
+    total_size += 1 +
+      ::google::protobuf::internal::WireFormatLite::StringSize(
+        this->info());
+  }
+
+  // int32 ret = 1;
+  if (this->ret() != 0) {
+    total_size += 1 +
+      ::google::protobuf::internal::WireFormatLite::Int32Size(
+        this->ret());
+  }
+
+  int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
+  SetCachedSize(cached_size);
+  return total_size;
+}
+
+void Response::MergeFrom(const ::google::protobuf::Message& from) {
+// @@protoc_insertion_point(generalized_merge_from_start:JetStream.Response)
+  GOOGLE_DCHECK_NE(&from, this);
+  const Response* source =
+      ::google::protobuf::internal::DynamicCastToGenerated<const Response>(
+          &from);
+  if (source == NULL) {
+  // @@protoc_insertion_point(generalized_merge_from_cast_fail:JetStream.Response)
+    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
+  } else {
+  // @@protoc_insertion_point(generalized_merge_from_cast_success:JetStream.Response)
+    MergeFrom(*source);
+  }
+}
+
+void Response::MergeFrom(const Response& from) {
+// @@protoc_insertion_point(class_specific_merge_from_start:JetStream.Response)
+  GOOGLE_DCHECK_NE(&from, this);
+  _internal_metadata_.MergeFrom(from._internal_metadata_);
+  ::google::protobuf::uint32 cached_has_bits = 0;
+  (void) cached_has_bits;
+
+  if (from.info().size() > 0) {
+
+    info_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.info_);
+  }
+  if (from.ret() != 0) {
+    set_ret(from.ret());
+  }
+}
+
+void Response::CopyFrom(const ::google::protobuf::Message& from) {
+// @@protoc_insertion_point(generalized_copy_from_start:JetStream.Response)
+  if (&from == this) return;
+  Clear();
+  MergeFrom(from);
+}
+
+void Response::CopyFrom(const Response& from) {
+// @@protoc_insertion_point(class_specific_copy_from_start:JetStream.Response)
+  if (&from == this) return;
+  Clear();
+  MergeFrom(from);
+}
+
+bool Response::IsInitialized() const {
+  return true;
+}
+
+void Response::Swap(Response* other) {
+  if (other == this) return;
+  InternalSwap(other);
+}
+void Response::InternalSwap(Response* other) {
+  using std::swap;
+  info_.Swap(&other->info_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(),
+    GetArenaNoVirtual());
+  swap(ret_, other->ret_);
+  _internal_metadata_.Swap(&other->_internal_metadata_);
+}
+
+::google::protobuf::Metadata Response::GetMetadata() const {
+  protobuf_def_2eproto::protobuf_AssignDescriptorsOnce();
+  return ::protobuf_def_2eproto::file_level_metadata[kIndexInFileMessages];
+}
+
+
+// @@protoc_insertion_point(namespace_scope)
+}  // namespace JetStream
+namespace google {
+namespace protobuf {
+template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::JetStream::RequestCmd* Arena::CreateMaybeMessage< ::JetStream::RequestCmd >(Arena* arena) {
+  return Arena::CreateInternal< ::JetStream::RequestCmd >(arena);
+}
+template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::JetStream::Response* Arena::CreateMaybeMessage< ::JetStream::Response >(Arena* arena) {
+  return Arena::CreateInternal< ::JetStream::Response >(arena);
+}
+}  // namespace protobuf
+}  // namespace google
+
+// @@protoc_insertion_point(global_scope)

+ 393 - 0
detect/stream/def.pb.h

@@ -0,0 +1,393 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: def.proto
+
+#ifndef PROTOBUF_INCLUDED_def_2eproto
+#define PROTOBUF_INCLUDED_def_2eproto
+
+#include <string>
+
+#include <google/protobuf/stubs/common.h>
+
+#if GOOGLE_PROTOBUF_VERSION < 3006001
+#error This file was generated by a newer version of protoc which is
+#error incompatible with your Protocol Buffer headers.  Please update
+#error your headers.
+#endif
+#if 3006001 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
+#error This file was generated by an older version of protoc which is
+#error incompatible with your Protocol Buffer headers.  Please
+#error regenerate this file with a newer version of protoc.
+#endif
+
+#include <google/protobuf/io/coded_stream.h>
+#include <google/protobuf/arena.h>
+#include <google/protobuf/arenastring.h>
+#include <google/protobuf/generated_message_table_driven.h>
+#include <google/protobuf/generated_message_util.h>
+#include <google/protobuf/inlined_string_field.h>
+#include <google/protobuf/metadata.h>
+#include <google/protobuf/message.h>
+#include <google/protobuf/repeated_field.h>  // IWYU pragma: export
+#include <google/protobuf/extension_set.h>  // IWYU pragma: export
+#include <google/protobuf/unknown_field_set.h>
+// @@protoc_insertion_point(includes)
+#define PROTOBUF_INTERNAL_EXPORT_protobuf_def_2eproto 
+
+// NOTE(review): protoc-generated declarations (parse tables, default-instance
+// externs, Arena factory specializations) — do not hand-edit; regenerate from
+// def.proto.
+namespace protobuf_def_2eproto {
+// Internal implementation detail -- do not use these members.
+struct TableStruct {
+  static const ::google::protobuf::internal::ParseTableField entries[];
+  static const ::google::protobuf::internal::AuxillaryParseTableField aux[];
+  static const ::google::protobuf::internal::ParseTable schema[2];
+  static const ::google::protobuf::internal::FieldMetadata field_metadata[];
+  static const ::google::protobuf::internal::SerializationTable serialization_table[];
+  static const ::google::protobuf::uint32 offsets[];
+};
+void AddDescriptors();
+}  // namespace protobuf_def_2eproto
+namespace JetStream {
+class RequestCmd;
+class RequestCmdDefaultTypeInternal;
+extern RequestCmdDefaultTypeInternal _RequestCmd_default_instance_;
+class Response;
+class ResponseDefaultTypeInternal;
+extern ResponseDefaultTypeInternal _Response_default_instance_;
+}  // namespace JetStream
+namespace google {
+namespace protobuf {
+template<> ::JetStream::RequestCmd* Arena::CreateMaybeMessage<::JetStream::RequestCmd>(Arena*);
+template<> ::JetStream::Response* Arena::CreateMaybeMessage<::JetStream::Response>(Arena*);
+}  // namespace protobuf
+}  // namespace google
+namespace JetStream {
+
+// ===================================================================
+
+// NOTE(review): protoc-generated message class for `message RequestCmd { int32
+// action = 1; }` — do not hand-edit; regenerate from def.proto.
+class RequestCmd : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:JetStream.RequestCmd) */ {
+ public:
+  RequestCmd();
+  virtual ~RequestCmd();
+
+  RequestCmd(const RequestCmd& from);
+
+  inline RequestCmd& operator=(const RequestCmd& from) {
+    CopyFrom(from);
+    return *this;
+  }
+  #if LANG_CXX11
+  RequestCmd(RequestCmd&& from) noexcept
+    : RequestCmd() {
+    *this = ::std::move(from);
+  }
+
+  inline RequestCmd& operator=(RequestCmd&& from) noexcept {
+    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
+      if (this != &from) InternalSwap(&from);
+    } else {
+      CopyFrom(from);
+    }
+    return *this;
+  }
+  #endif
+  static const ::google::protobuf::Descriptor* descriptor();
+  static const RequestCmd& default_instance();
+
+  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
+  static inline const RequestCmd* internal_default_instance() {
+    return reinterpret_cast<const RequestCmd*>(
+               &_RequestCmd_default_instance_);
+  }
+  static constexpr int kIndexInFileMessages =
+    0;
+
+  void Swap(RequestCmd* other);
+  friend void swap(RequestCmd& a, RequestCmd& b) {
+    a.Swap(&b);
+  }
+
+  // implements Message ----------------------------------------------
+
+  inline RequestCmd* New() const final {
+    return CreateMaybeMessage<RequestCmd>(NULL);
+  }
+
+  RequestCmd* New(::google::protobuf::Arena* arena) const final {
+    return CreateMaybeMessage<RequestCmd>(arena);
+  }
+  void CopyFrom(const ::google::protobuf::Message& from) final;
+  void MergeFrom(const ::google::protobuf::Message& from) final;
+  void CopyFrom(const RequestCmd& from);
+  void MergeFrom(const RequestCmd& from);
+  void Clear() final;
+  bool IsInitialized() const final;
+
+  size_t ByteSizeLong() const final;
+  bool MergePartialFromCodedStream(
+      ::google::protobuf::io::CodedInputStream* input) final;
+  void SerializeWithCachedSizes(
+      ::google::protobuf::io::CodedOutputStream* output) const final;
+  ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
+      bool deterministic, ::google::protobuf::uint8* target) const final;
+  int GetCachedSize() const final { return _cached_size_.Get(); }
+
+  private:
+  void SharedCtor();
+  void SharedDtor();
+  void SetCachedSize(int size) const final;
+  void InternalSwap(RequestCmd* other);
+  private:
+  inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
+    return NULL;
+  }
+  inline void* MaybeArenaPtr() const {
+    return NULL;
+  }
+  public:
+
+  ::google::protobuf::Metadata GetMetadata() const final;
+
+  // nested types ----------------------------------------------------
+
+  // accessors -------------------------------------------------------
+
+  // int32 action = 1;
+  void clear_action();
+  static const int kActionFieldNumber = 1;
+  ::google::protobuf::int32 action() const;
+  void set_action(::google::protobuf::int32 value);
+
+  // @@protoc_insertion_point(class_scope:JetStream.RequestCmd)
+ private:
+
+  ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_;
+  ::google::protobuf::int32 action_;
+  mutable ::google::protobuf::internal::CachedSize _cached_size_;
+  friend struct ::protobuf_def_2eproto::TableStruct;
+};
+// -------------------------------------------------------------------
+
+// NOTE(review): protoc-generated message class for `message Response { int32
+// ret = 1; string info = 2; }` — do not hand-edit; regenerate from def.proto.
+class Response : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:JetStream.Response) */ {
+ public:
+  Response();
+  virtual ~Response();
+
+  Response(const Response& from);
+
+  inline Response& operator=(const Response& from) {
+    CopyFrom(from);
+    return *this;
+  }
+  #if LANG_CXX11
+  Response(Response&& from) noexcept
+    : Response() {
+    *this = ::std::move(from);
+  }
+
+  inline Response& operator=(Response&& from) noexcept {
+    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
+      if (this != &from) InternalSwap(&from);
+    } else {
+      CopyFrom(from);
+    }
+    return *this;
+  }
+  #endif
+  static const ::google::protobuf::Descriptor* descriptor();
+  static const Response& default_instance();
+
+  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
+  static inline const Response* internal_default_instance() {
+    return reinterpret_cast<const Response*>(
+               &_Response_default_instance_);
+  }
+  static constexpr int kIndexInFileMessages =
+    1;
+
+  void Swap(Response* other);
+  friend void swap(Response& a, Response& b) {
+    a.Swap(&b);
+  }
+
+  // implements Message ----------------------------------------------
+
+  inline Response* New() const final {
+    return CreateMaybeMessage<Response>(NULL);
+  }
+
+  Response* New(::google::protobuf::Arena* arena) const final {
+    return CreateMaybeMessage<Response>(arena);
+  }
+  void CopyFrom(const ::google::protobuf::Message& from) final;
+  void MergeFrom(const ::google::protobuf::Message& from) final;
+  void CopyFrom(const Response& from);
+  void MergeFrom(const Response& from);
+  void Clear() final;
+  bool IsInitialized() const final;
+
+  size_t ByteSizeLong() const final;
+  bool MergePartialFromCodedStream(
+      ::google::protobuf::io::CodedInputStream* input) final;
+  void SerializeWithCachedSizes(
+      ::google::protobuf::io::CodedOutputStream* output) const final;
+  ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
+      bool deterministic, ::google::protobuf::uint8* target) const final;
+  int GetCachedSize() const final { return _cached_size_.Get(); }
+
+  private:
+  void SharedCtor();
+  void SharedDtor();
+  void SetCachedSize(int size) const final;
+  void InternalSwap(Response* other);
+  private:
+  inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
+    return NULL;
+  }
+  inline void* MaybeArenaPtr() const {
+    return NULL;
+  }
+  public:
+
+  ::google::protobuf::Metadata GetMetadata() const final;
+
+  // nested types ----------------------------------------------------
+
+  // accessors -------------------------------------------------------
+
+  // string info = 2;
+  void clear_info();
+  static const int kInfoFieldNumber = 2;
+  const ::std::string& info() const;
+  void set_info(const ::std::string& value);
+  #if LANG_CXX11
+  void set_info(::std::string&& value);
+  #endif
+  void set_info(const char* value);
+  void set_info(const char* value, size_t size);
+  ::std::string* mutable_info();
+  ::std::string* release_info();
+  void set_allocated_info(::std::string* info);
+
+  // int32 ret = 1;
+  void clear_ret();
+  static const int kRetFieldNumber = 1;
+  ::google::protobuf::int32 ret() const;
+  void set_ret(::google::protobuf::int32 value);
+
+  // @@protoc_insertion_point(class_scope:JetStream.Response)
+ private:
+
+  ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_;
+  ::google::protobuf::internal::ArenaStringPtr info_;
+  ::google::protobuf::int32 ret_;
+  mutable ::google::protobuf::internal::CachedSize _cached_size_;
+  friend struct ::protobuf_def_2eproto::TableStruct;
+};
+// ===================================================================
+
+
+// ===================================================================
+
+// NOTE(review): protoc-generated inline accessors (no-arena string storage) —
+// do not hand-edit; regenerate from def.proto.
+#ifdef __GNUC__
+  #pragma GCC diagnostic push
+  #pragma GCC diagnostic ignored "-Wstrict-aliasing"
+#endif  // __GNUC__
+// RequestCmd
+
+// int32 action = 1;
+inline void RequestCmd::clear_action() {
+  action_ = 0;
+}
+inline ::google::protobuf::int32 RequestCmd::action() const {
+  // @@protoc_insertion_point(field_get:JetStream.RequestCmd.action)
+  return action_;
+}
+inline void RequestCmd::set_action(::google::protobuf::int32 value) {
+  
+  action_ = value;
+  // @@protoc_insertion_point(field_set:JetStream.RequestCmd.action)
+}
+
+// -------------------------------------------------------------------
+
+// Response
+
+// int32 ret = 1;
+inline void Response::clear_ret() {
+  ret_ = 0;
+}
+inline ::google::protobuf::int32 Response::ret() const {
+  // @@protoc_insertion_point(field_get:JetStream.Response.ret)
+  return ret_;
+}
+inline void Response::set_ret(::google::protobuf::int32 value) {
+  
+  ret_ = value;
+  // @@protoc_insertion_point(field_set:JetStream.Response.ret)
+}
+
+// string info = 2;
+inline void Response::clear_info() {
+  info_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
+}
+inline const ::std::string& Response::info() const {
+  // @@protoc_insertion_point(field_get:JetStream.Response.info)
+  return info_.GetNoArena();
+}
+inline void Response::set_info(const ::std::string& value) {
+  
+  info_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value);
+  // @@protoc_insertion_point(field_set:JetStream.Response.info)
+}
+#if LANG_CXX11
+inline void Response::set_info(::std::string&& value) {
+  
+  info_.SetNoArena(
+    &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value));
+  // @@protoc_insertion_point(field_set_rvalue:JetStream.Response.info)
+}
+#endif
+inline void Response::set_info(const char* value) {
+  GOOGLE_DCHECK(value != NULL);
+  
+  info_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value));
+  // @@protoc_insertion_point(field_set_char:JetStream.Response.info)
+}
+inline void Response::set_info(const char* value, size_t size) {
+  
+  info_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
+      ::std::string(reinterpret_cast<const char*>(value), size));
+  // @@protoc_insertion_point(field_set_pointer:JetStream.Response.info)
+}
+inline ::std::string* Response::mutable_info() {
+  
+  // @@protoc_insertion_point(field_mutable:JetStream.Response.info)
+  return info_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
+}
+inline ::std::string* Response::release_info() {
+  // @@protoc_insertion_point(field_release:JetStream.Response.info)
+  
+  return info_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
+}
+inline void Response::set_allocated_info(::std::string* info) {
+  if (info != NULL) {
+    
+  } else {
+    
+  }
+  info_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), info);
+  // @@protoc_insertion_point(field_set_allocated:JetStream.Response.info)
+}
+
+#ifdef __GNUC__
+  #pragma GCC diagnostic pop
+#endif  // __GNUC__
+// -------------------------------------------------------------------
+
+
+// @@protoc_insertion_point(namespace_scope)
+
+}  // namespace JetStream
+
+// @@protoc_insertion_point(global_scope)
+
+#endif  // PROTOBUF_INCLUDED_def_2eproto

+ 17 - 0
detect/stream/def.proto

@@ -0,0 +1,17 @@
+syntax = "proto3";
+package JetStream;
+
+// Command sent by a client; 'action' selects the operation to run.
+// TODO(review): document the valid action codes.
+message RequestCmd
+{
+  int32 action=1;  // operation code — semantics defined by the server
+}
+
+// Generic reply: 'ret' is a status code (presumably 0 = success — confirm),
+// 'info' carries a human-readable message or serialized payload.
+message Response{
+  int32 ret=1;
+  string info=2;
+}
+
+
+// Server-streaming RPC: one RequestCmd in, a stream of Response messages out.
+// NOTE(review): "NavExcutor" looks like a typo for "NavExecutor", but renaming
+// would break every generated stub/caller, so it is left as-is.
+service NavExcutor{
+  rpc DetectStream(RequestCmd) returns(stream Response){}
+}

+ 1 - 0
detect/stream/proto.sh

@@ -0,0 +1 @@
+protoc --cpp_out=./ --grpc_out=./ --plugin=protoc-gen-grpc=`which grpc_cpp_plugin` -I=./ def.proto

+ 69 - 0
detect/wheel-detector.cpp

@@ -0,0 +1,69 @@
+#include "wheel-detector.h"
+
+// Build the wheel detector from a serialized TensorRT engine file.
+// make_pipe(false) allocates device/host buffers without warmup inference.
+// The fixed sizes must match the exported model: 640x480 input, 160x120
+// mask grid with 32 prototype channels — assumed from the engine; TODO confirm.
+TensorrtWheelDetector::TensorrtWheelDetector(const std::string &enginfile){
+    yolov8_=new YOLOv8_seg(enginfile);
+    yolov8_->make_pipe(false);
+    imgsz_=cv::Size{640, 480};
+    seg_h_        = 120;
+    seg_w_        = 160;
+    seg_channels_ = 32;
+}
+
+// Tear down the detector and release the underlying YOLOv8 engine.
+TensorrtWheelDetector::~TensorrtWheelDetector(){
+    // delete on a null pointer is a no-op, so no explicit guard is needed.
+    delete yolov8_;
+    yolov8_ = nullptr;
+}
+
+// Run segmentation inference on one frame.
+// img must exactly match the fixed network input size (imgsz_, 640x480);
+// objs receives the post-processed detections.
+// Returns false when the engine is missing or the image size is wrong.
+bool TensorrtWheelDetector::detect(cv::Mat& img,std::vector<Object>& objs){
+    if(yolov8_==nullptr){
+        return false;
+    }
+    if(img.size()!=imgsz_){
+        printf("imgsz required [%d,%d],but input is [%d,%d]\n",imgsz_.height,imgsz_.width,img.rows,img.cols);
+        return false;
+    }
+    yolov8_->copy_from_Mat(img, imgsz_);
+    yolov8_->infer();
+    // Fixed post-processing parameters: strict 0.9 confidence cut-off,
+    // standard 0.65 NMS IoU, at most 10 instances per frame.
+    float score_thres=0.9;
+    float iou_thres=0.65;
+    int topk=10;
+    yolov8_->postprocess(objs, score_thres, iou_thres, topk, seg_channels_, seg_h_, seg_w_);
+    return true;
+
+}
+
+// Detect wheels and additionally render boxes/masks into res.
+// Returns false if the underlying detect() fails; res is untouched then.
+bool TensorrtWheelDetector::detect(cv::Mat& img,std::vector<Object>& objs,cv::Mat& res){
+    if (!detect(img, objs)) {
+        return false;
+    }
+    // Two-class model: index 0 = background, index 1 = wheel.
+    const std::vector<std::string> class_names = {"none", "wheel"};
+    const std::vector<std::vector<unsigned int>> box_colors  = {{0, 114, 189},   {0, 255, 0}};
+    const std::vector<std::vector<unsigned int>> fill_colors = {{255, 56, 56},  {255, 0, 0}};
+    yolov8_->draw_objects(img, res, objs, class_names, box_colors, fill_colors);
+    return true;
+}
+
+// Collect the absolute image-space pixel coordinates covered by the object's
+// segmentation mask.  obj.boxMask is an 8-bit mask aligned to the top-left of
+// obj.rect; any non-zero value marks a foreground (wheel) pixel.
+std::vector<cv::Point> TensorrtWheelDetector::getPointsFromObj(Object obj){
+    std::vector<cv::Point> ret;
+    // Guard: no mask or unexpected element type — nothing to collect.
+    if (obj.boxMask.empty() || obj.boxMask.type() != CV_8U) {
+        return ret;
+    }
+    const int x = int(obj.rect.x + 0.5);
+    const int y = int(obj.rect.y + 0.5);
+    // Iterate over the mask's own dimensions instead of re-deriving width and
+    // height from obj.rect: the rounded rect size and the mask size can
+    // disagree by a pixel, and indexing past the mask is undefined behavior.
+    // (Also removes the leftover debug printf of the mask type.)
+    for (int i = 0; i < obj.boxMask.rows; ++i) {
+        const uchar* row = obj.boxMask.ptr<uchar>(i);  // row pointer: faster than at<>
+        for (int j = 0; j < obj.boxMask.cols; ++j) {
+            if (row[j] != 0) {
+                ret.push_back(cv::Point(x + j, y + i));
+            }
+        }
+    }
+    return ret;
+}

+ 25 - 0
detect/wheel-detector.h

@@ -0,0 +1,25 @@
+// Fix: the original include guard tested WHELL_DETECTOR__HH___ but defined
+// WWHELL_DETECTOR__HH___, so the guard never took effect and a double include
+// would redefine the class.  Both macros now match.
+#ifndef WHEEL_DETECTOR__HH___
+#define WHEEL_DETECTOR__HH___
+#include "yolov8-seg.h"
+
+// Thin wrapper around YOLOv8_seg that runs wheel instance segmentation on
+// fixed-size 640x480 frames using a prebuilt TensorRT engine.
+class TensorrtWheelDetector{
+public:
+    // enginfile: path to the serialized TensorRT engine file.
+    TensorrtWheelDetector(const std::string &enginfile);
+    ~TensorrtWheelDetector();
+
+    // Run inference on img (must be 640x480) and fill objs with detections.
+    bool detect(cv::Mat& img,std::vector<Object>& objs);
+    // As above, additionally rendering boxes and masks into res.
+    bool detect(cv::Mat& img,std::vector<Object>& objs,cv::Mat& res);
+
+    // Convert an object's box-aligned mask into absolute pixel coordinates.
+    static std::vector<cv::Point> getPointsFromObj(Object obj);
+
+private:
+    YOLOv8_seg*  yolov8_;        // owned inference engine
+    cv::Size     imgsz_;         // fixed network input size (640x480)
+    int      seg_h_        = 120;  // mask prototype grid height
+    int      seg_w_        = 160;  // mask prototype grid width
+    int      seg_channels_ = 32;   // mask prototype channels
+
+};
+
+#endif

+ 319 - 0
detect/yolov8-seg.cpp

@@ -0,0 +1,319 @@
+//
+// Created by ubuntu on 3/16/23.
+//
+
+#include "yolov8-seg.h"
+
+
+using namespace seg;
+
+// Deserialize a prebuilt TensorRT engine from disk, create an execution
+// context, and record every binding (name, element size, dims) so that
+// make_pipe() can size the device/host buffers later.
+YOLOv8_seg::YOLOv8_seg(const std::string& engine_file_path)
+{
+    // Read the whole serialized engine blob into memory.
+    std::ifstream file(engine_file_path, std::ios::binary);
+    assert(file.good());
+    file.seekg(0, std::ios::end);
+    auto size = file.tellg();
+    file.seekg(0, std::ios::beg);
+    char* trtModelStream = new char[size];
+    assert(trtModelStream);
+    file.read(trtModelStream, size);
+    file.close();
+    initLibNvInferPlugins(&this->gLogger, "");
+    this->runtime = nvinfer1::createInferRuntime(this->gLogger);
+    assert(this->runtime != nullptr);
+
+    this->engine = this->runtime->deserializeCudaEngine(trtModelStream, size);
+    assert(this->engine != nullptr);
+    delete[] trtModelStream;
+    this->context = this->engine->createExecutionContext();
+
+    assert(this->context != nullptr);
+    cudaStreamCreate(&this->stream);
+    this->num_bindings = this->engine->getNbBindings();
+
+    // Enumerate bindings; inputs are pinned to their kMAX profile shape so a
+    // single allocation covers every runtime shape.
+    for (int i = 0; i < this->num_bindings; ++i) {
+        Binding            binding;
+        nvinfer1::Dims     dims;
+        nvinfer1::DataType dtype = this->engine->getBindingDataType(i);
+        std::string        name  = this->engine->getBindingName(i);
+        binding.name             = name;
+        binding.dsize            = type_to_size(dtype);
+
+        bool IsInput = engine->bindingIsInput(i);
+        if (IsInput) {
+            this->num_inputs += 1;
+            dims         = this->engine->getProfileDimensions(i, 0, nvinfer1::OptProfileSelector::kMAX);
+            binding.size = get_size_by_dims(dims);
+            binding.dims = dims;
+            this->input_bindings.push_back(binding);
+            // set max opt shape
+            this->context->setBindingDimensions(i, dims);
+        }
+        else {
+            dims         = this->context->getBindingDimensions(i);
+            binding.size = get_size_by_dims(dims);
+            binding.dims = dims;
+            this->output_bindings.push_back(binding);
+            this->num_outputs += 1;
+        }
+        // printf("name: %s, size: %ld, dims: %d %d %d %d %d\n", 
+        //         name.c_str(), binding.dsize, dims.nbDims, dims.d[0], dims.d[1], dims.d[2], dims.d[3]);
+    }
+}
+
+// Release TensorRT objects (context, engine, runtime — legacy destroy() API),
+// the CUDA stream, and all device/pinned-host buffers from make_pipe().
+YOLOv8_seg::~YOLOv8_seg()
+{
+    this->context->destroy();
+    this->engine->destroy();
+    this->runtime->destroy();
+    cudaStreamDestroy(this->stream);
+    for (auto& ptr : this->device_ptrs) {
+        CHECK(cudaFree(ptr));
+    }
+
+    for (auto& ptr : this->host_ptrs) {
+        CHECK(cudaFreeHost(ptr));
+    }
+}
+
+// Allocate device buffers for every input binding and device + pinned-host
+// buffer pairs for every output binding.  When warmup is true, run 10 dummy
+// inferences with zero-filled input to prime CUDA/TensorRT.
+void YOLOv8_seg::make_pipe(bool warmup)
+{
+
+    for (auto& bindings : this->input_bindings) {
+        void* d_ptr;
+        CHECK(cudaMalloc(&d_ptr, bindings.size * bindings.dsize));
+        this->device_ptrs.push_back(d_ptr);
+    }
+
+    for (auto& bindings : this->output_bindings) {
+        void * d_ptr, *h_ptr;
+        size_t size = bindings.size * bindings.dsize;
+        CHECK(cudaMalloc(&d_ptr, size));
+        CHECK(cudaHostAlloc(&h_ptr, size, 0));
+        this->device_ptrs.push_back(d_ptr);
+        this->host_ptrs.push_back(h_ptr);
+    }
+
+    if (warmup) {
+        for (int i = 0; i < 10; i++) {
+            for (auto& bindings : this->input_bindings) {
+                size_t size  = bindings.size * bindings.dsize;
+                void*  h_ptr = malloc(size);
+                memset(h_ptr, 0, size);
+                // NOTE(review): every iteration copies into device_ptrs[0];
+                // correct for a single-input engine, but a multi-input engine
+                // would only ever warm the first buffer — confirm if relevant.
+                CHECK(cudaMemcpyAsync(this->device_ptrs[0], h_ptr, size, cudaMemcpyHostToDevice, this->stream));
+                free(h_ptr);
+            }
+            this->infer();
+        }
+        printf("model warmup 10 times\n");
+    }
+}
+
+// Resize `image` to fit `size` while preserving aspect ratio, pad the borders
+// with gray (114,114,114), and convert to a normalized NCHW float blob in
+// `out`.  The scale/padding used are saved in this->pparam so postprocess()
+// can map detections back to original image coordinates.
+void YOLOv8_seg::letterbox(const cv::Mat& image, cv::Mat& out, cv::Size& size)
+{
+    const float inp_h  = size.height;
+    const float inp_w  = size.width;
+    float       height = image.rows;
+    float       width  = image.cols;
+
+    // Uniform scale so the image fits inside the network input.
+    float r    = std::min(inp_h / height, inp_w / width);
+    int   padw = std::round(width * r);
+    int   padh = std::round(height * r);
+
+    cv::Mat tmp;
+    if ((int)width != padw || (int)height != padh) {
+        cv::resize(image, tmp, cv::Size(padw, padh));
+    }
+    else {
+        tmp = image.clone();
+    }
+
+    // Remaining space is split evenly into top/bottom and left/right borders;
+    // the ±0.1 nudge makes rounding deterministic at .5 boundaries.
+    float dw = inp_w - padw;
+    float dh = inp_h - padh;
+
+    dw /= 2.0f;
+    dh /= 2.0f;
+    int top    = int(std::round(dh - 0.1f));
+    int bottom = int(std::round(dh + 0.1f));
+    int left   = int(std::round(dw - 0.1f));
+    int right  = int(std::round(dw + 0.1f));
+
+    cv::copyMakeBorder(tmp, tmp, top, bottom, left, right, cv::BORDER_CONSTANT, {114, 114, 114});
+
+    // BGR->RGB swap, scale to [0,1], pack as 1x3xHxW float.
+    cv::dnn::blobFromImage(tmp, out, 1 / 255.f, cv::Size(), cv::Scalar(0, 0, 0), true, false, CV_32F);
+    this->pparam.ratio  = 1 / r;
+    this->pparam.dw     = dw;
+    this->pparam.dh     = dh;
+    this->pparam.height = height;
+    this->pparam.width  = width;
+}
+
+// Letterbox `image` to the engine's own input dims (taken from binding 0) and
+// upload the resulting blob to the device input buffer asynchronously.
+void YOLOv8_seg::copy_from_Mat(const cv::Mat& image)
+{
+    cv::Mat  nchw;
+    auto&    in_binding = this->input_bindings[0];
+    auto     width      = in_binding.dims.d[3];
+    auto     height     = in_binding.dims.d[2];
+    cv::Size size{width, height};
+    this->letterbox(image, nchw, size);
+
+    this->context->setBindingDimensions(0, nvinfer1::Dims{4, {1, 3, height, width}});
+
+    CHECK(cudaMemcpyAsync(
+        this->device_ptrs[0], nchw.ptr<float>(), nchw.total() * nchw.elemSize(), cudaMemcpyHostToDevice, this->stream));
+}
+
+// Same as above, but the caller supplies the target input size explicitly.
+void YOLOv8_seg::copy_from_Mat(const cv::Mat& image, cv::Size& size)
+{
+    cv::Mat nchw;
+    this->letterbox(image, nchw, size);
+    this->context->setBindingDimensions(0, nvinfer1::Dims{4, {1, 3, size.height, size.width}});
+    CHECK(cudaMemcpyAsync(
+        this->device_ptrs[0], nchw.ptr<float>(), nchw.total() * nchw.elemSize(), cudaMemcpyHostToDevice, this->stream));
+}
+
+// Enqueue inference on the CUDA stream, copy every output binding back into
+// its pinned host buffer, and block until the stream has drained so host_ptrs
+// are safe to read.
+void YOLOv8_seg::infer()
+{
+
+    this->context->enqueueV2(this->device_ptrs.data(), this->stream, nullptr);
+    for (int i = 0; i < this->num_outputs; i++) {
+        size_t osize = this->output_bindings[i].size * this->output_bindings[i].dsize;
+        CHECK(cudaMemcpyAsync(
+            this->host_ptrs[i], this->device_ptrs[i + this->num_inputs], osize, cudaMemcpyDeviceToHost, this->stream));
+    }
+    cudaStreamSynchronize(this->stream);
+}
+
+// Decode raw network outputs into final detections.
+// host_ptrs[0]: per-anchor rows [x0,y0,x1,y1,score,label,32 mask coeffs]
+// (layout inferred from the pointer walk below — confirm against the export).
+// host_ptrs[1]: mask prototypes (seg_channels x seg_h*seg_w).
+// Steps: threshold by score, undo letterboxing, NMS, keep topk, then
+// sigmoid(coeffs x protos) cropped+resized to yield per-object binary masks.
+void YOLOv8_seg::postprocess(
+    std::vector<Object>& objs, float score_thres, float iou_thres, int topk, int seg_channels, int seg_h, int seg_w)
+{
+    objs.clear();
+    auto input_h      = this->input_bindings[0].dims.d[2];
+    auto input_w      = this->input_bindings[0].dims.d[3];
+    auto num_anchors  = this->output_bindings[0].dims.d[1];
+    auto num_channels = this->output_bindings[0].dims.d[2];
+
+    auto& dw     = this->pparam.dw;
+    auto& dh     = this->pparam.dh;
+    auto& width  = this->pparam.width;
+    auto& height = this->pparam.height;
+    auto& ratio  = this->pparam.ratio;
+
+    auto*   output = static_cast<float*>(this->host_ptrs[0]);
+    cv::Mat protos = cv::Mat(seg_channels, seg_h * seg_w, CV_32F, static_cast<float*>(this->host_ptrs[1]));
+
+    std::vector<int>      labels;
+    std::vector<float>    scores;
+    std::vector<cv::Rect> bboxes;
+    std::vector<cv::Mat>  mask_confs;
+    std::vector<int>      indices;
+
+    for (int i = 0; i < num_anchors; i++) {
+        float* ptr   = output + i * num_channels;
+        float  score = *(ptr + 4);
+		
+         /*if (score > score_thres) {
+             printf("num_channels: %d, score: %f\n", num_channels, score);
+         }*/
+	
+        if (score > score_thres) {
+            // Walk the row in place: box corners first (undo letterbox pad),
+            // then scale back to original-image coordinates and clamp.
+            float x0 = *ptr++ - dw;
+            float y0 = *ptr++ - dh;
+            float x1 = *ptr++ - dw;
+            float y1 = *ptr++ - dh;
+
+            x0 = clamp(x0 * ratio, 0.f, width);
+            y0 = clamp(y0 * ratio, 0.f, height);
+            x1 = clamp(x1 * ratio, 0.f, width);
+            y1 = clamp(y1 * ratio, 0.f, height);
+
+            // ptr currently points at the score slot; ++ skips to label, the
+            // next ++ to the start of the 32 mask coefficients.
+            int     label     = *(++ptr);
+            cv::Mat mask_conf = cv::Mat(1, seg_channels, CV_32F, ++ptr);
+            mask_confs.push_back(mask_conf);
+            labels.push_back(label);
+            scores.push_back(score);
+            bboxes.push_back(cv::Rect_<float>(x0, y0, x1 - x0, y1 - y0));
+        }
+    }
+
+#if defined(BATCHED_NMS)
+    cv::dnn::NMSBoxesBatched(bboxes, scores, labels, score_thres, iou_thres, indices);
+#else
+    cv::dnn::NMSBoxes(bboxes, scores, score_thres, iou_thres, indices);
+#endif
+
+    cv::Mat masks;
+    int     cnt = 0;
+    for (auto& i : indices) {
+        if (cnt >= topk) {
+            break;
+        }
+        cv::Rect tmp = bboxes[i];
+        Object   obj;
+        obj.label = labels[i];
+        obj.rect  = tmp;
+        obj.prob  = scores[i];
+        masks.push_back(mask_confs[i]);
+        objs.push_back(obj);
+        cnt += 1;
+    }
+    if (masks.empty()) {
+        // masks is empty
+    }
+    else {
+        // Combine coefficients with prototypes, apply sigmoid, crop away the
+        // letterbox border (scaled to prototype resolution), and upsample to
+        // the original image; threshold at 0.5 for a binary per-object mask.
+        cv::Mat matmulRes = (masks * protos).t();
+        cv::Mat maskMat   = matmulRes.reshape(indices.size(), {seg_h, seg_w});
+
+        std::vector<cv::Mat> maskChannels;
+        cv::split(maskMat, maskChannels);
+        int scale_dw = dw / input_w * seg_w;
+        int scale_dh = dh / input_h * seg_h;
+
+        cv::Rect roi(scale_dw, scale_dh, seg_w - 2 * scale_dw, seg_h - 2 * scale_dh);
+
+        for (int i = 0; i < indices.size(); i++) {
+            cv::Mat dest, mask;
+            cv::exp(-maskChannels[i], dest);
+            dest = 1.0 / (1.0 + dest);
+            dest = dest(roi);
+            cv::resize(dest, mask, cv::Size((int)width, (int)height), cv::INTER_LINEAR);
+            objs[i].boxMask = mask(objs[i].rect) > 0.5f;
+        }
+    }
+}
+
+// Render detections: bounding boxes, class labels with confidence, and
+// alpha-blended instance masks.
+// image: source frame (unmodified); res: output visualization.
+// COLORS is indexed by class label; MASK_COLORS cycles modulo 20.
+void YOLOv8_seg::draw_objects(const cv::Mat&                                image,
+                              cv::Mat&                                      res,
+                              const std::vector<Object>&                    objs,
+                              const std::vector<std::string>&               CLASS_NAMES,
+                              const std::vector<std::vector<unsigned int>>& COLORS,
+                              const std::vector<std::vector<unsigned int>>& MASK_COLORS)
+{
+    res          = image.clone();
+    cv::Mat mask = image.clone();
+    for (auto& obj : objs) {
+        int        idx   = obj.label;
+        cv::Scalar color = cv::Scalar(COLORS[idx][0], COLORS[idx][1], COLORS[idx][2]);
+        cv::Scalar mask_color =
+            cv::Scalar(MASK_COLORS[idx % 20][0], MASK_COLORS[idx % 20][1], MASK_COLORS[idx % 20][2]);
+        cv::rectangle(res, obj.rect, color, 2);
+
+        char text[256];
+        // Fix: snprintf instead of sprintf — a long class name can no longer
+        // overflow the fixed 256-byte label buffer.
+        snprintf(text, sizeof(text), "%s %.1f%%", CLASS_NAMES[idx].c_str(), obj.prob * 100);
+        mask(obj.rect).setTo(mask_color, obj.boxMask);
+
+        int      baseLine   = 0;
+        cv::Size label_size = cv::getTextSize(text, cv::FONT_HERSHEY_SIMPLEX, 0.4, 1, &baseLine);
+
+        int x = (int)obj.rect.x;
+        int y = (int)obj.rect.y + 1;
+
+        // Keep the label inside the frame.
+        if (y > res.rows)
+            y = res.rows;
+
+        cv::rectangle(res, cv::Rect(x, y, label_size.width, label_size.height + baseLine), {0, 0, 255}, -1);
+
+        cv::putText(res, text, cv::Point(x, y + label_size.height), cv::FONT_HERSHEY_SIMPLEX, 0.4, {255, 255, 255}, 1);
+    }
+    // 0.5*res + 0.8*mask + 1: blend mask overlay into the visualization.
+    cv::addWeighted(res, 0.5, mask, 0.8, 1, res);
+}

+ 53 - 0
detect/yolov8-seg.h

@@ -0,0 +1,53 @@
+//
+// Created by ubuntu on 3/16/23.
+//
+#ifndef JETSON_SEGMENT_YOLOV8_SEG_HPP
+#define JETSON_SEGMENT_YOLOV8_SEG_HPP
+#include "NvInferPlugin.h"
+#include "common.hpp"
+#include <fstream>
+
+// NOTE(review): `using namespace seg;` in a header leaks the namespace into
+// every includer; consider qualifying seg::Object/seg::Binding/... instead.
+using namespace seg;
+
+// TensorRT-backed YOLOv8 instance-segmentation runner: deserializes an engine,
+// manages device/host buffers, and decodes boxes + masks from raw outputs.
+class YOLOv8_seg {
+public:
+    // engine_file_path: serialized TensorRT engine produced offline.
+    explicit YOLOv8_seg(const std::string& engine_file_path);
+    ~YOLOv8_seg();
+
+    // Allocate I/O buffers; optionally run 10 zero-input warmup inferences.
+    void                 make_pipe(bool warmup = true);
+    // Letterbox + upload an image to the device input buffer.
+    void                 copy_from_Mat(const cv::Mat& image);
+    void                 copy_from_Mat(const cv::Mat& image, cv::Size& size);
+    // Aspect-preserving resize/pad to `size`; records mapping in pparam.
+    void                 letterbox(const cv::Mat& image, cv::Mat& out, cv::Size& size);
+    // Run the network and copy outputs to pinned host memory (synchronous).
+    void                 infer();
+    // Decode outputs into Objects (threshold, NMS, topk, mask synthesis).
+    void                 postprocess(std::vector<Object>& objs,
+                                     float                score_thres  = 0.25f,
+                                     float                iou_thres    = 0.65f,
+                                     int                  topk         = 100,
+                                     int                  seg_channels = 32,
+                                     int                  seg_h        = 160,
+                                     int                  seg_w        = 160);
+    // Visualization helper: boxes, labels and blended masks into `res`.
+    static void          draw_objects(const cv::Mat&                                image,
+                                      cv::Mat&                                      res,
+                                      const std::vector<Object>&                    objs,
+                                      const std::vector<std::string>&               CLASS_NAMES,
+                                      const std::vector<std::vector<unsigned int>>& COLORS,
+                                      const std::vector<std::vector<unsigned int>>& MASK_COLORS);
+    int                  num_bindings;
+    int                  num_inputs  = 0;
+    int                  num_outputs = 0;
+    std::vector<Binding> input_bindings;
+    std::vector<Binding> output_bindings;
+    std::vector<void*>   host_ptrs;    // pinned host output buffers
+    std::vector<void*>   device_ptrs;  // device buffers: inputs then outputs
+
+    PreParam pparam;  // letterbox mapping for the last processed frame
+
+private:
+    nvinfer1::ICudaEngine*       engine  = nullptr;
+    nvinfer1::IRuntime*          runtime = nullptr;
+    nvinfer1::IExecutionContext* context = nullptr;
+    cudaStream_t                 stream  = nullptr;
+    Logger                       gLogger{nvinfer1::ILogger::Severity::kERROR};
+};
+
+#endif  // JETSON_SEGMENT_YOLOV8_SEG_HPP

+ 7 - 0
main.cpp

@@ -0,0 +1,7 @@
+#include <iostream>
+
+// Placeholder entry point: prints two separator lines and exits successfully.
+int main() {
+    const char* separator = "----------------------------------------------------------\n";
+    for (int i = 0; i < 2; ++i) {
+        printf("%s", separator);
+    }
+    return EXIT_SUCCESS;
+}

+ 183 - 0
model/inference.cpp

@@ -0,0 +1,183 @@
+#include "inference.h"
+
+// Constructs the detector wrapper: stores the configuration and loads the
+// ONNX network immediately; class labels stay hard-coded (see the header).
+Inference::Inference(const std::string &onnxModelPath, const cv::Size &modelInputShape, const std::string &classesTxtFile, const bool &runWithCuda)
+{
+    modelPath = onnxModelPath;
+    modelShape = modelInputShape;
+    classesPath = classesTxtFile;
+    cudaEnabled = runWithCuda;
+
+    loadOnnxNetwork();
+    // loadClassesFromFile(); The classes are hard-coded for this example
+}
+
+// Runs one forward pass on `input` and returns post-NMS detections.
+// Supports both YOLOv5 output (rows x [x,y,w,h,conf,classes]) and YOLOv8
+// output ([x,y,w,h,classes] x cols — transposed below before decoding).
+std::vector<Detection> Inference::runInference(const cv::Mat &input)
+{
+    cv::Mat modelInput = input;
+    // Letterbox: pad to a square so the resize in blobFromImage keeps
+    // the aspect ratio (only when the model input itself is square).
+    if (letterBoxForSquare && modelShape.width == modelShape.height)
+        modelInput = formatToSquare(modelInput);
+
+    cv::Mat blob;
+    // Normalise to [0,1], resize to modelShape, swap BGR->RGB, no crop.
+    cv::dnn::blobFromImage(modelInput, blob, 1.0/255.0, modelShape, cv::Scalar(), true, false);
+    net.setInput(blob);
+
+    std::vector<cv::Mat> outputs;
+    net.forward(outputs, net.getUnconnectedOutLayersNames());
+
+    int rows = outputs[0].size[1];
+    int dimensions = outputs[0].size[2];
+
+    bool yolov8 = false;
+    // yolov5 has an output of shape (batchSize, 25200, 85) (Num classes + box[x,y,w,h] + confidence[c])
+    // yolov8 has an output of shape (batchSize, 84,  8400) (Num classes + box[x,y,w,h])
+    if (dimensions > rows) // Check if the shape[2] is more than shape[1] (yolov8)
+    {
+        yolov8 = true;
+        rows = outputs[0].size[2];
+        dimensions = outputs[0].size[1];
+
+        // Transpose so that each row is one candidate detection.
+        outputs[0] = outputs[0].reshape(1, dimensions);
+        cv::transpose(outputs[0], outputs[0]);
+    }
+    float *data = (float *)outputs[0].data;
+
+    // Scale factors mapping model-input coordinates back to the
+    // (possibly letterboxed) source image. modelShape is Size2f, so
+    // this is float division.
+    float x_factor = modelInput.cols / modelShape.width;
+    float y_factor = modelInput.rows / modelShape.height;
+
+    std::vector<int> class_ids;
+    std::vector<float> confidences;
+    std::vector<cv::Rect> boxes;
+
+    // Decode every candidate row into (class, score, box).
+    for (int i = 0; i < rows; ++i)
+    {
+        if (yolov8)
+        {
+            // YOLOv8: class scores start right after the 4 box values.
+            float *classes_scores = data+4;
+
+            cv::Mat scores(1, classes.size(), CV_32FC1, classes_scores);
+            cv::Point class_id;
+            double maxClassScore;
+
+            minMaxLoc(scores, 0, &maxClassScore, 0, &class_id);
+
+            if (maxClassScore > modelScoreThreshold)
+            {
+                confidences.push_back(maxClassScore);
+                class_ids.push_back(class_id.x);
+
+                // Box is centre-x, centre-y, width, height in model space.
+                float x = data[0];
+                float y = data[1];
+                float w = data[2];
+                float h = data[3];
+
+                int left = int((x - 0.5 * w) * x_factor);
+                int top = int((y - 0.5 * h) * y_factor);
+
+                int width = int(w * x_factor);
+                int height = int(h * y_factor);
+
+                boxes.push_back(cv::Rect(left, top, width, height));
+            }
+        }
+        else // yolov5
+        {
+            // YOLOv5 carries an objectness score before the class scores.
+            float confidence = data[4];
+
+            if (confidence >= modelConfidenceThreshold)
+            {
+                float *classes_scores = data+5;
+
+                cv::Mat scores(1, classes.size(), CV_32FC1, classes_scores);
+                cv::Point class_id;
+                double max_class_score;
+
+                minMaxLoc(scores, 0, &max_class_score, 0, &class_id);
+
+                if (max_class_score > modelScoreThreshold)
+                {
+                    confidences.push_back(confidence);
+                    class_ids.push_back(class_id.x);
+
+                    float x = data[0];
+                    float y = data[1];
+                    float w = data[2];
+                    float h = data[3];
+
+                    int left = int((x - 0.5 * w) * x_factor);
+                    int top = int((y - 0.5 * h) * y_factor);
+
+                    int width = int(w * x_factor);
+                    int height = int(h * y_factor);
+
+                    boxes.push_back(cv::Rect(left, top, width, height));
+                }
+            }
+        }
+
+        // Advance to the next candidate row.
+        data += dimensions;
+    }
+
+    // Non-maximum suppression over all surviving candidates.
+    std::vector<int> nms_result;
+    cv::dnn::NMSBoxes(boxes, confidences, modelScoreThreshold, modelNMSThreshold, nms_result);
+
+    std::vector<Detection> detections{};
+    for (unsigned long i = 0; i < nms_result.size(); ++i)
+    {
+        int idx = nms_result[i];
+
+        Detection result;
+        result.class_id = class_ids[idx];
+        result.confidence = confidences[idx];
+
+        // Random display colour per detection (for drawing only).
+        std::random_device rd;
+        std::mt19937 gen(rd());
+        std::uniform_int_distribution<int> dis(100, 255);
+        result.color = cv::Scalar(dis(gen),
+                                  dis(gen),
+                                  dis(gen));
+
+        result.className = classes[result.class_id];
+        result.box = boxes[idx];
+
+        detections.push_back(result);
+    }
+
+    return detections;
+}
+
+// Reads one class label per line from classesPath into `classes`.
+// Best-effort: silently does nothing when the file cannot be opened.
+void Inference::loadClassesFromFile()
+{
+    std::ifstream inputFile(classesPath);
+    if (!inputFile.is_open())
+        return;
+
+    std::string classLine;
+    while (std::getline(inputFile, classLine))
+        classes.push_back(classLine);
+    inputFile.close();
+}
+
+// Loads the ONNX model from modelPath and selects the execution backend:
+// the CUDA DNN backend when requested, otherwise OpenCV's CPU backend.
+void Inference::loadOnnxNetwork()
+{
+    net = cv::dnn::readNetFromONNX(modelPath);
+    if (cudaEnabled)
+    {
+        net.setPreferableBackend(cv::dnn::DNN_BACKEND_CUDA);
+        net.setPreferableTarget(cv::dnn::DNN_TARGET_CUDA);
+    }
+    else
+    {
+        net.setPreferableBackend(cv::dnn::DNN_BACKEND_OPENCV);
+        net.setPreferableTarget(cv::dnn::DNN_TARGET_CPU);
+    }
+}
+
+// Pads `source` with black pixels onto a square canvas whose side is
+// max(width, height); the original image sits in the top-left corner.
+cv::Mat Inference::formatToSquare(const cv::Mat &source)
+{
+    int side = MAX(source.cols, source.rows);
+    cv::Mat squared = cv::Mat::zeros(side, side, CV_8UC3);
+    source.copyTo(squared(cv::Rect(0, 0, source.cols, source.rows)));
+    return squared;
+}

+ 50 - 0
model/inference.h

@@ -0,0 +1,50 @@
+#pragma once
+
+// Cpp native
+#include <fstream>
+#include <vector>
+#include <string>
+#include <random>
+
+// OpenCV / DNN / Inference
+#include <opencv2/imgproc.hpp>
+#include <opencv2/opencv.hpp>
+#include <opencv2/dnn.hpp>
+
+// One detected object returned by Inference::runInference.
+struct Detection
+{
+    int class_id{0};          // index into the detector's class list
+    std::string className{};  // human-readable class label
+    float confidence{0.0};    // post-NMS confidence score
+    cv::Scalar color{};       // random BGR colour for visualisation
+    cv::Rect box{};           // bounding box in source-image pixels
+};
+
+// Thin wrapper around cv::dnn for YOLOv5/YOLOv8 ONNX object detection.
+class Inference
+{
+public:
+    // modelInputShape must match the exported network's input size;
+    // runWithCuda selects the CUDA DNN backend when true.
+    Inference(const std::string &onnxModelPath, const cv::Size &modelInputShape = {640, 640}, const std::string &classesTxtFile = "", const bool &runWithCuda = false);
+    // Runs detection on one image and returns post-NMS results.
+    std::vector<Detection> runInference(const cv::Mat &input);
+
+private:
+    void loadClassesFromFile();  // reads one class label per line
+    void loadOnnxNetwork();      // loads the model and picks the backend
+    cv::Mat formatToSquare(const cv::Mat &source);  // letterbox padding
+
+    std::string modelPath{};
+    std::string classesPath{};
+    bool cudaEnabled{};
+
+    // NOTE(review): labels are hard-coded; loadClassesFromFile is unused.
+    std::vector<std::string> classes{"car", "wheel"};
+
+    cv::Size2f modelShape{};
+
+    // Thresholds used during decoding and non-maximum suppression.
+    float modelConfidenceThreshold {0.25};
+    float modelScoreThreshold      {0.45};
+    float modelNMSThreshold        {0.50};
+
+    bool letterBoxForSquare = true;
+
+    cv::dnn::Net net;
+};
+

+ 29 - 0
tof3DMain.cpp

@@ -0,0 +1,29 @@
+//
+// Created by zx on 2023/10/7.
+//
+#include <iostream>
+#include "log/log.h"
+#include "tof3d_manager.h"
+#include "communication/rabbitmq_communication.h"
+
+// Entry point: initialises logging, then the tof3d manager, and idles
+// forever; the RabbitMQ setup below is currently disabled.
+int main(int argc, char * argv[]) {
+    // Initialise the logging system.
+    ZX::InitGlog(PROJECT_NAME, ETC_PATH PROJECT_NAME "/LogInfo/");
+
+    /*LOG(INFO) << "begin init rabbitmq.";
+    auto ret = RabbitmqCommunicationTof3D::get_instance_references().rabbitmq_init_from_protobuf(ETC_PATH"Tof3d/rabbitmq.prototxt");
+    if (ret != SUCCESS) {
+        LOG(ERROR) << "system communication mq init failed: " << ret.to_string();
+        return -1;
+    }
+    RabbitmqCommunicationTof3D::get_instance_references().set_encapsulate_status_cycle_time(100);
+*/
+    tof3dManager tof3d;
+    auto err = tof3d.init();
+    LOG(INFO) << err.to_string();
+
+    // Keep the process alive; device worker threads do the actual work.
+    while (1) {
+        usleep(2000 * 1000);
+    }
+    return err.get_error_code();
+}

文件差异内容过多而无法显示
+ 2888 - 0
tof3d_config.pb.cc


文件差异内容过多而无法显示
+ 2105 - 0
tof3d_config.pb.h


+ 61 - 0
tof3d_config.proto

@@ -0,0 +1,61 @@
+syntax = "proto2";
+
+// Mounting position of a TOF camera relative to the vehicle:
+// Left/Right Front, Left/Right Rear.
+enum DeviceAzimuth {
+  LF = 1;
+  RF = 2;
+  LR = 3;
+  RR = 4;
+}
+
+// Vzense SDK built-in tuning parameters applied at device start-up.
+message Tof3dVzenseBuiltInParams {
+  optional uint32 work_mode = 1 [default = 0x00];
+  optional uint32 irgmmgain = 2 [default = 45];
+  optional uint32 frame_rate = 3 [default = 10];
+  optional bool enable_manual_exposure_time = 4 [default = true];
+  optional uint32 exposure_time = 5 [default = 2000];
+  // NOTE(review): field number 6 is skipped — presumably a removed
+  // field; consider declaring `reserved 6;` to prevent accidental reuse.
+  optional bool enable_filter_fill_hole = 7 [default = true];
+  optional bool enable_filter_spatial = 8 [default = true];
+  optional bool enable_time_filter = 9 [default = true];
+  optional uint32 time_filter_value = 10 [default = 3];
+  optional bool enable_flying_pixel_filter = 11 [default = true];
+  optional uint32 flying_pixel_filter_value = 12 [default = 15];
+  optional bool enable_confidence_filter = 13 [default = true];
+  optional uint32 confidence_filter_value = 14 [default = 0];
+  optional bool enable_hdr_mode = 15 [default = true];
+}
+
+// Post-processing options for the YOLOv8 detector.
+message Yolov8ProcessParams {
+  // Image-saving settings.
+  optional bool save_mat = 1 [default = false];
+  optional bool draw_rect = 2 [default = false];
+  optional float save_confidence = 3 [default = 0.8];
+  optional float save_aspect_ratio_min = 4 [default = 0.9];
+  optional float save_aspect_ratio_max = 5 [default = 1.1];
+}
+
+// RabbitMQ publishing settings (currently unused below).
+// NOTE(review): field numbers start at 4, not 1 — legal but unusual.
+message RabbitmqCommunicationParams {
+  required bool enable_rabbitmq = 4;
+  required string rabbitmq_ex = 5;
+  required string rabbitmq_route_key = 6;
+}
+
+// MQTT publishing settings (currently unused below).
+message MqttCommunicationParams {
+  required bool enable_mqtt = 7;
+  required string mqtt_topic = 8;
+}
+
+// Configuration for one Vzense TOF camera.
+message tof3dVzenseEtc {
+  required bool enable_device = 1;  // processing below only runs when the device is enabled
+  required string ip = 2 [default = "192.168.1.101"];
+//  optional string url = 3 [default = ""];
+  required Tof3dVzenseBuiltInParams bip = 4;
+  required Yolov8ProcessParams yolo = 5;
+  required DeviceAzimuth azimuth = 6;
+//  optional RabbitmqCommunicationParams rabbitmq_param = 6;
+//  optional MqttCommunicationParams mqtt_param = 7;
+}
+
+// Top-level configuration: one entry per camera.
+message tof3dManagerParams
+{
+  repeated tof3dVzenseEtc vzense_tof3d_devices = 1;
+}

+ 23 - 0
tof3d_manager.cpp

@@ -0,0 +1,23 @@
+//
+// Created by zx on 2023/10/7.
+//
+
+#include "tof3d_manager.h"
+
+// Loads the tof3d manager configuration and starts every configured
+// Vzense device through the DeviceTof3D singleton.
+// Returns SUCCESS on normal startup, otherwise a descriptive error.
+Error_manager tof3dManager::init() {
+    // NOTE(review): the extension is .json while the shipped config is
+    // tof3d_manager.prototxt — confirm loadProtobufFile accepts both.
+    if (loadProtobufFile(ETC_PATH PROJECT_NAME "/tof3d_manager.json", m_tof_etc) == SUCCESS) {
+        LOG(INFO) << m_tof_etc.DebugString();
+    } else {
+        // Report the path that was actually loaded (the original message
+        // pointed at a different, hard-coded "Tof3d/" path).
+        return {FAILED, NEGLIGIBLE_ERROR, "tof3dManager read param form %s error.", ETC_PATH PROJECT_NAME "/tof3d_manager.json"};
+    }
+
+    // Index every configured device by its IP address.
+    // (Removed the unused `list` local that shadowed this iteration.)
+    std::map<std::string, tof3dVzenseEtc> device_mp;
+    for (auto &device: m_tof_etc.vzense_tof3d_devices()) {
+        device_mp.insert(std::pair<std::string, tof3dVzenseEtc>(device.ip(), device));
+    }
+
+    DeviceTof3D::iter()->Init(device_mp, true);
+
+    return {};
+}

+ 19 - 0
tof3d_manager.h

@@ -0,0 +1,19 @@
+#pragma once
+
+#include "protobuf/load_protobuf.hpp"
+#include "tof3d_config.pb.h"
+#include "vzense/device_tof3d.h"
+#include "error_code/error_code.hpp"
+
+#include "communication/rabbitmq_communication.h"
+// Loads the tof3d configuration file and boots the DeviceTof3D singleton.
+class tof3dManager {
+public:
+    tof3dManager() = default;
+    ~tof3dManager() = default;
+
+    // Reads the protobuf config and starts all configured devices.
+    Error_manager init();
+protected:
+
+private:
+    tof3dManagerParams m_tof_etc;  // parsed device configuration
+};

+ 12 - 0
tof3d_manager.prototxt

@@ -0,0 +1,12 @@
+vzense_tof3d_devices {
+    ip:"192.168.2.101"
+}
+vzense_tof3d_devices {
+    ip:"192.168.2.102"
+}
+vzense_tof3d_devices {
+    ip:"192.168.2.103"
+}
+vzense_tof3d_devices {
+    ip:"192.168.2.104"
+}

+ 351 - 0
velodyne_manager.prototxt

@@ -0,0 +1,351 @@
+velodyne_lidars {
+  ip: ""
+  port: 2367
+  model: "RSHELIOS16"
+  calibrationFile: "../setting/VLP16db.yaml"
+  lidar_id: 6311
+}
+velodyne_lidars {
+  ip: ""
+  port: 2369
+  model: "RSHELIOS16"
+  calibrationFile: "../setting/VLP16db.yaml"
+  lidar_id: 6312
+}
+velodyne_lidars {
+  ip: ""
+  port: 2370
+  model: "RSHELIOS16"
+  calibrationFile: "../setting/VLP16db.yaml"
+  lidar_id: 6313
+}
+velodyne_lidars {
+  ip: ""
+  port: 2371
+  model: "RSHELIOS16"
+  calibrationFile: "../setting/VLP16db.yaml"
+  lidar_id: 6314
+}
+velodyne_lidars {
+  ip: ""
+  port: 2371  # NOTE(review): port 2371 is also used by lidars 6314 and 6316 — confirm this duplication is intentional
+  model: "RSHELIOS16"
+  calibrationFile: "../setting/VLP16db.yaml"
+  lidar_id: 6315
+}
+velodyne_lidars {
+  ip: ""
+  port: 2371
+  model: "RSHELIOS16"
+  calibrationFile: "../setting/VLP16db.yaml"
+  lidar_id: 6316
+}
+velodyne_lidars {
+  ip: ""
+  port: 2367
+  model: "RSHELIOS16"
+  calibrationFile: "../setting/VLP16db.yaml"
+  lidar_id: 6321
+}
+velodyne_lidars {
+  ip: ""
+  port: 2369
+  model: "RSHELIOS16"
+  calibrationFile: "../setting/VLP16db.yaml"
+  lidar_id: 6322
+}
+velodyne_lidars {
+  ip: ""
+  port: 2370
+  model: "RSHELIOS16"
+  calibrationFile: "../setting/VLP16db.yaml"
+  lidar_id: 6323
+}
+velodyne_lidars {
+  ip: ""
+  port: 2371
+  model: "RSHELIOS16"
+  calibrationFile: "../setting/VLP16db.yaml"
+  lidar_id: 6324
+}
+region {
+  region_id: 3117
+  minx: -3.2
+  maxx: -0.42
+  miny: -2.73
+  maxy: 2.73
+  minz: 0.04
+  maxz: 0.5
+  cut_box {
+    cut_points {
+      x: -3.248
+      y: 1.6974
+    }
+    cut_points {
+      x: -2.876
+      y: 2.804
+    }
+    cut_points {
+      x: -0.381
+      y: 2.8024
+    }
+    cut_points {
+      x: -0.149
+      y: 1.934
+    }
+    cut_points {
+      x: -0.195
+      y: -2.785
+    }
+    cut_points {
+      x: -3.076
+      y: -2.785
+    }
+    cut_points {
+      x: -3.576
+      y: -2.2345
+    }
+  }
+  turnplate_cx: -1.602
+  turnplate_cy: 0.123
+  lidar_exts {
+    lidar_id: 6311
+  }
+  lidar_exts {
+    lidar_id: 6312
+  }
+  lidar_exts {
+    lidar_id: 6313
+  }
+  lidar_exts {
+    lidar_id: 6314
+  }
+  plc_forward_offset {
+    plc_offsetx: 0.000
+    plc_offsety: -6.35546398
+    plc_offset_degree: -90
+    plc_offset_wheel_base: -0.01
+  }
+  plc_reverse_offset {
+    plc_offsetx: 0.000
+    plc_offsety: -6.35546398
+    plc_offset_degree: -90
+    plc_offset_wheel_base: -0.01
+  }
+  car_range_info {
+    border_minx: -1.79
+    border_maxx: -1.41
+    plc_border_miny: 1.49
+    plc_border_maxy: 1.55
+    car_min_width: 1.55
+    car_max_width: 1.95
+    car_min_wheelbase: 2
+    car_max_wheelbase: 3.2
+    turnplate_angle_limit_min_clockwise: -5.3
+    turnplate_angle_limit_max_clockwise: 5.3
+  }
+  terminal_range_info {
+    border_minx: -1.79
+    border_maxx: -1.41
+    plc_border_miny: 1.49
+    plc_border_maxy: 1.55
+    car_min_width: 1.55
+    car_max_width: 1.9
+    car_min_wheelbase: 2
+    car_max_wheelbase: 3.2
+    turnplate_angle_limit_min_clockwise: -5.3
+    turnplate_angle_limit_max_clockwise: 5.3
+  }
+}
+region {
+  region_id: 3118
+  minx: 0.4
+  maxx: 2.8
+  miny: -2.6
+  maxy: 2.7
+  minz: 0.04
+  maxz: 0.5
+  cut_box {
+    cut_points {
+      x: 0.202236
+      y: 1.94702
+    }
+    cut_points {
+      x: 0.611436
+      y: 2.91914
+    }
+    cut_points {
+      x: 2.7929
+      y: 2.853
+    }
+    cut_points {
+      x: 3.4343
+      y: 1.9964
+    }
+    cut_points {
+      x: 3.567
+      y: -1.474
+    }
+    cut_points {
+      x: 3.136
+      y: -2.66
+    }
+    cut_points {
+      x: 0.4147
+      y: -2.7
+    }
+    cut_points {
+      x: 0.455
+      y: -1.46
+    }
+  }
+  turnplate_cx: 1.63245
+  turnplate_cy: 0.065
+  lidar_exts {
+    lidar_id: 6313
+  }
+  lidar_exts {
+    lidar_id: 6314
+  }
+  lidar_exts {
+    lidar_id: 6315
+  }
+  lidar_exts {
+    lidar_id: 6316
+  }
+  plc_forward_offset {
+    plc_offsetx: 0.015
+    plc_offsety: -6.31662941
+    plc_offset_degree: -90
+    plc_offset_wheel_base: -0
+  }
+  plc_reverse_offset {
+    plc_offsetx: 0.015
+    plc_offsety: -6.31662941
+    plc_offset_degree: -90
+    plc_offset_wheel_base: -0
+  }
+  car_range_info {
+    border_minx: 1.41
+    border_maxx: 1.79
+    plc_border_miny: 1.49
+    plc_border_maxy: 1.55
+    car_min_width: 1.55
+    car_max_width: 1.95
+    car_min_wheelbase: 2.3
+    car_max_wheelbase: 3.15
+    turnplate_angle_limit_min_clockwise: -5.3
+    turnplate_angle_limit_max_clockwise: 5.3
+  }
+  terminal_range_info {
+    border_minx: 1.41
+    border_maxx: 1.79
+    plc_border_miny: 1.49
+    plc_border_maxy: 1.55
+    car_min_width: 1.55
+    car_max_width: 1.9
+    car_min_wheelbase: 2.3
+    car_max_wheelbase: 3.15
+    turnplate_angle_limit_min_clockwise: -5.3
+    turnplate_angle_limit_max_clockwise: 5.3
+  }
+}
+region {
+  region_id: 3219
+  minx: -1.2
+  maxx: 1.12
+  miny: -2.6
+  maxy: 2.9
+  minz: 0.03
+  maxz: 0.5
+  cut_box {
+    cut_points {
+      x: -1.764
+      y: 1.537
+    }
+    cut_points {
+      x: -1.8198
+      y: 1.99788
+    }
+    cut_points {
+      x: -1.31579
+      y: 2.85
+    }
+    cut_points {
+      x: 1.2329
+      y: 2.85
+    }
+    cut_points {
+      x: 2.01
+      y: 1.959
+    }
+    cut_points {
+      x: 2.01
+      y: -2.629
+    }
+    cut_points {
+      x: -1.269
+      y: -2.726
+    }
+    cut_points {
+      x: -1.96
+      y: -1.3466
+    }
+    cut_points {
+      x: -1.764
+      y: 1.137
+    }
+  }
+  turnplate_cx: -0.09
+  turnplate_cy: 0.18
+  lidar_exts {
+    lidar_id: 6321
+  }
+  lidar_exts {
+    lidar_id: 6322
+  }
+  lidar_exts {
+    lidar_id: 6323
+  }
+  lidar_exts {
+    lidar_id: 6324
+  }
+  plc_forward_offset {
+    plc_offsetx: -1.482
+    plc_offsety: -6.430
+    plc_offset_degree: -90.4558368
+    plc_offset_wheel_base: -0.01
+  }
+  plc_reverse_offset {
+    plc_offsetx: -1.482
+    plc_offsety: -6.430
+    plc_offset_degree: -90.4558368
+    plc_offset_wheel_base: -0.01
+  }
+  car_range_info {
+    border_minx: -1.79
+    border_maxx: -1.41
+    plc_border_miny: 1.4
+    plc_border_maxy: 1.55
+    car_min_width: 1.55
+    car_max_width: 1.95
+    car_min_wheelbase: 2.3
+    car_max_wheelbase: 3.15
+    turnplate_angle_limit_min_clockwise: -5.3
+    turnplate_angle_limit_max_clockwise: 5.3
+  }
+  terminal_range_info {
+    border_minx: -1.79
+    border_maxx: -1.41
+    plc_border_miny: 1.4
+    plc_border_maxy: 1.55
+    car_min_width: 1.55
+    car_max_width: 1.9
+    car_min_wheelbase: 2.3
+    car_max_wheelbase: 3.15
+    turnplate_angle_limit_min_clockwise: -5.3
+    turnplate_angle_limit_max_clockwise: 5.3
+  }
+}
+fence_data_path: "/home/zx/data/ground_detect/"
+distribution_mode: false

+ 721 - 0
vzense/device_tof3d.cpp

@@ -0,0 +1,721 @@
+#include "device_tof3d.h"
+
+#include <vector>
+
+// Initialises the Vzense SDK, loads per-device configuration, optionally
+// searches the network for devices, connects the configured ones, then
+// spawns one frame-receiving thread per known device.
+Error_manager DeviceTof3D::Init(const VzEtcMap &tp_tof3d, bool search) {
+    LOG(INFO) << "Init DeviceTof3D.";
+    m_vz_status = VZ_Initialize();
+    // Re-initialisation is tolerated so Init may safely run twice.
+    if (m_vz_status != VzReturnStatus::VzRetOK && m_vz_status != VzReturnStatus::VzRetReInitialized) {
+        return {TOF3D_VZENSE_DEVICE_INIT_FAILED, MAJOR_ERROR, "VzInitialize failed status: %d", m_vz_status};
+    }
+
+    loadEtc(tp_tof3d);
+
+    if (search) {
+        SearchDevice(5);
+    }
+
+    ConnectAllEtcDevices();
+
+    // Register for hot-plug notifications (camera attached/detached).
+    VZ_SetHotPlugStatusCallback(DeviceTof3D::HotPlugStateCallback, &mp_device_info);
+
+    // Start one receive thread per device.
+    for (auto &info: mp_device_info) {
+        setTof3dParams(info.second.etc.ip());
+        getTof3dParams(info.second.etc.ip());
+        mp_thread_info.insert(std::pair<std::string, ThreadInfo>(
+                info.first, ThreadInfo(new std::thread(&DeviceTof3D::receiveFrameThread, this, info.first),
+                                       new Thread_condition()
+                                       )
+                ));
+    }
+
+    // Wake all worker threads.
+    for (auto &t: mp_thread_info) {
+        t.second.condit->notify_all(true);
+    }
+
+    return {TOF3D_VZENSE_DEVICE_INIT_SUCCESS, NORMAL, "VzInitialize success."};
+}
+}
+
+// Polls the SDK for attached devices for `time` seconds, then records
+// every device found into mp_device_info, keyed by IP address.
+Error_manager DeviceTof3D::SearchDevice(const double &time) {
+    uint32_t deviceCount = 0;
+
+    auto t = std::chrono::steady_clock::now();
+    std::chrono::duration<double> cost = std::chrono::steady_clock::now() - t;
+    while (cost.count() < time) {
+        m_vz_status = VZ_GetDeviceCount(&deviceCount);
+        cost = std::chrono::steady_clock::now() - t;
+        if (m_vz_status != VzReturnStatus::VzRetOK) {
+            LOG(INFO) << "VzGetDeviceCount failed! make sure the DCAM is connected";
+            std::this_thread::sleep_for(std::chrono::seconds(1));
+            continue;
+        }
+        std::this_thread::sleep_for(std::chrono::seconds(1));
+        LOG(INFO) << "Found device count: " << deviceCount;
+    }
+
+    if (deviceCount == 0) {
+        // Nothing to enumerate.
+        return {};
+    }
+
+    // std::vector owns the buffer, fixing the original leak of the raw
+    // `new VzDeviceInfo[deviceCount]` that was never delete[]'d.
+    std::vector<VzDeviceInfo> deviceList(deviceCount);
+
+    m_vz_status = VZ_GetDeviceInfoList(deviceCount, deviceList.data());
+
+    if (m_vz_status != VzReturnStatus::VzRetOK) {
+        LOG(INFO) << "GetDeviceListInfo failed status:" << m_vz_status;
+        return {}; //TODO: propagate a real error code
+    } else {
+        for (uint32_t i = 0; i < deviceCount; i++) {
+            // Create the bookkeeping entry on first sight of this IP.
+            auto iter = mp_device_info.find(deviceList[i].ip);
+            if (iter == mp_device_info.end()) {
+                mp_device_info.insert(std::pair<std::string, tof3dVzenseInfo>(deviceList[i].ip, tof3dVzenseInfo()));
+                iter = mp_device_info.find(deviceList[i].ip);
+            }
+            iter->second.info = deviceList[i];
+            LOG(INFO) << "Found device: " << deviceList[i].ip;
+        }
+    }
+    return {};
+}
+
+// Intentionally empty: per-device work happens in receiveFrameThread.
+void DeviceTof3D::run() {
+
+}
+
+// Opens the device at `ip` by IP address and, when `open_stream` is set,
+// starts its frame stream. Updates the per-device connection flags.
+Error_manager DeviceTof3D::ConnectDevice(const std::string &ip, bool open_stream) {
+    auto iter = mp_device_info.find(ip);
+    if (iter == mp_device_info.end()) {
+        LOG(WARNING) << "Can\'t find " << ip << " in mp_device_info";
+        return {}; //TODO
+    }
+
+    m_vz_status = VZ_OpenDeviceByIP(iter->second.info.ip, &iter->second.handle);
+    if (m_vz_status != VzReturnStatus::VzRetOK) {
+        iter->second.is_connect = false;
+        LOG(WARNING) << "OpenDevice " << ip << " failed status:" << m_vz_status;
+        return {}; //TODO
+    } else {
+        iter->second.is_connect = true;
+        LOG(INFO) << "OpenDevice " << ip << " success status:" << m_vz_status;
+    }
+
+    if (open_stream) {
+        m_vz_status = VZ_StartStream(iter->second.handle);
+        if (m_vz_status != VzReturnStatus::VzRetOK) {
+            iter->second.is_start_stream = false;
+            LOG(WARNING) << "VZ_StartStream " << ip << " failed status:" << m_vz_status;
+            return {};
+        } else {
+            iter->second.is_start_stream = true;
+            LOG(INFO) << "VZ_StartStream " << ip << " success status:" << m_vz_status;
+        }
+    }
+
+    return {};
+}
+
+// Opens every device whose configuration enables it; stream start is
+// controlled by `open_stream`. Per-device failures are not aggregated.
+Error_manager DeviceTof3D::ConnectAllEtcDevices(bool open_stream) {
+    for (auto &entry: mp_device_info) {
+        if (!entry.second.etc.enable_device())
+            continue;
+        ConnectDevice(entry.first, open_stream);
+    }
+    return {};
+}
+
+// Opens every known device regardless of its enable flag; stream start
+// is controlled by `open_stream`.
+Error_manager DeviceTof3D::ConnectAllDevices(bool open_stream) {
+    for (auto &entry: mp_device_info) {
+        ConnectDevice(entry.first, open_stream);
+    }
+    return {};
+}
+
+// Stops the stream and closes the device at `ip`. On failure the state
+// flags are left marked active so a later retry is still attempted.
+Error_manager DeviceTof3D::DisConnectDevice(const std::string &ip) {
+    auto iter = mp_device_info.find(ip);
+    if (iter == mp_device_info.end()) {
+        return {}; //TODO
+    }
+
+    m_vz_status = VZ_StopStream(iter->second.handle);
+    if (m_vz_status != VzReturnStatus::VzRetOK) {
+        iter->second.is_start_stream = true;   // stream still considered running
+        LOG(WARNING) << "VZ_StopStream failed status:" << m_vz_status;
+    } else {
+        iter->second.is_start_stream = false;
+        LOG(INFO) << "VZ_StopStream success status:" << m_vz_status;
+    }
+
+    m_vz_status = VZ_CloseDevice(&iter->second.handle);
+    if (m_vz_status != VzReturnStatus::VzRetOK) {
+        iter->second.is_connect = true;        // device still considered open
+        LOG(WARNING) << "VZ_CloseDevice failed status:" << m_vz_status;
+    } else {
+        iter->second.is_connect = false;
+        LOG(INFO) << "VZ_CloseDevice success status:" << m_vz_status;
+    }
+
+    return {};
+}
+
+// Closes every device whose configuration enables it.
+Error_manager DeviceTof3D::DisConnectAllEtcDevices() {
+    for (auto &entry: mp_device_info) {
+        if (!entry.second.etc.enable_device())
+            continue;
+        DisConnectDevice(entry.first);
+    }
+    return {};
+}
+
+// Closes every known device unconditionally.
+Error_manager DeviceTof3D::DisConnectAllDevices() {
+    for (auto &entry: mp_device_info) {
+        DisConnectDevice(entry.first);
+    }
+    return {};
+}
+
+// Fetches one depth frame from the device at `ip`, reconnecting (with
+// the stream started) when the device is not yet open.
+Error_manager DeviceTof3D::getDepthFrame(const std::string &ip, VzFrame &depthFrame) {
+    auto iter = mp_device_info.find(ip);
+    if (iter == mp_device_info.end()) {
+        return {FAILED, NORMAL, "Device %s not in list, can\'t get depth frame.", ip.c_str()}; //TODO
+    }
+
+    // Lazy reconnect if the device was never opened or dropped out.
+    if (iter->second.handle == nullptr || !iter->second.is_connect) {
+        if(ConnectDevice(ip, true) != SUCCESS) {
+            return {FAILED, NORMAL, "Open device %s failed, stop get depth frame.", ip.c_str()};
+        }
+    }
+
+    VzFrameReady frameReady = {0};
+    // 100 ms wait for the next frame to become ready.
+    m_vz_status = VZ_GetFrameReady(iter->second.handle, 100, &frameReady);
+
+    if (m_vz_status != VzRetOK) {
+        LOG(WARNING) << ip << "  VZ_GetFrameReady failed status:" << m_vz_status;
+        return {}; //TODO
+    }
+
+    if (1 == frameReady.depth) {
+        m_vz_status = VZ_GetFrame(iter->second.handle, VzDepthFrame, &depthFrame);
+
+        if (m_vz_status == VzRetOK && depthFrame.pFrameData != nullptr) {
+//            LOG(INFO) << ip << " VZ_GetFrame VzDepthFrame success:" << m_vz_status;
+        } else {
+            LOG(INFO) << ip << " VZ_GetFrame VzDepthFrame failed:" << m_vz_status;
+        }
+    }
+
+    return {};
+}
+
+// Fetches one IR frame from the device at `ip`, reconnecting (with the
+// stream started) when the device is not yet open.
+Error_manager DeviceTof3D::getIrFrame(const std::string &ip, VzFrame &irFrame) {
+    auto iter = mp_device_info.find(ip);
+    if (iter == mp_device_info.end()) {
+        return {FAILED, NORMAL, "Device %s not in list, can\'t get ir frame.", ip.c_str()}; //TODO
+    }
+
+    // Lazy reconnect if the device was never opened or dropped out.
+    if (iter->second.handle == nullptr || !iter->second.is_connect) {
+        if(ConnectDevice(ip, true) != SUCCESS) {
+            return {FAILED, NORMAL, "Open device %s failed, stop get ir frame.", ip.c_str()};
+        }
+    }
+
+    VzFrameReady frameReady = {0};
+    // 100 ms wait for the next frame to become ready.
+    m_vz_status = VZ_GetFrameReady(iter->second.handle, 100, &frameReady);
+
+    if (m_vz_status != VzRetOK) {
+        LOG(WARNING) << ip << "  VZ_GetFrameReady failed status:" << m_vz_status;
+        return {}; //TODO
+    }
+
+    if (1 == frameReady.ir) {
+        m_vz_status = VZ_GetFrame(iter->second.handle, VzIRFrame, &irFrame);
+
+        if (m_vz_status == VzRetOK && irFrame.pFrameData != nullptr) {
+            LOG(INFO) << ip << "VZ_GetFrame VzIrFrame success:" << m_vz_status;
+        } else {
+            LOG(INFO) << ip << "VZ_GetFrame VzIrFrame failed:" << m_vz_status;
+        }
+    }
+
+    return {};
+}
+
+// Fetches one depth frame and one IR frame from the device at `ip`,
+// reconnecting (with the stream started) when the device is not open.
+// Individual failures are merged into the returned Error_manager.
+Error_manager DeviceTof3D::getDepthAndIrPicture(const std::string &ip, VzFrame &depthFrame, VzFrame &irFrame) {
+    auto iter = mp_device_info.find(ip);
+    if (iter == mp_device_info.end()) {
+        return {FAILED, NORMAL, "Device %s not in list, can\'t get depth and ir frame.", ip.c_str()}; //TODO
+    }
+
+    if (iter->second.handle == nullptr || !iter->second.is_connect) {
+        if(ConnectDevice(ip, true) != SUCCESS) {
+            return {FAILED, NORMAL, "Open device %s failed, stop get depth and ir frame.", ip.c_str()};
+        }
+    }
+
+    Error_manager ret;
+    VzFrameReady frameReady = {0};
+    m_vz_status = VZ_GetFrameReady(iter->second.handle, 100, &frameReady);
+
+    if (m_vz_status != VzRetOK) {
+        LOG(WARNING) << ip << "  VZ_GetFrameReady failed status:" << m_vz_status;
+        ret.compare_and_merge_up({FAILED, MINOR_ERROR, "Device %s VZ_GetFrameReady failed status: %d.", ip.c_str(), m_vz_status});    //TODO
+        return ret;
+    }
+
+    //Get depth frame, depth frame only output in following data mode
+    if (1 == frameReady.depth) {
+        m_vz_status = VZ_GetFrame(iter->second.handle, VzDepthFrame, &depthFrame);
+
+        if (m_vz_status == VzRetOK && depthFrame.pFrameData != nullptr) {
+            LOG(INFO) << ip << " frameIndex :" << depthFrame.frameIndex;
+        } else {
+            // Fixed: this branch previously reported "VzIrFrame" even
+            // though it fetches the depth frame.
+            ret.compare_and_merge_up(
+                    {FAILED, MINOR_ERROR, "Device %s VZ_GetFrame VzDepthFrame failed status: %d.", ip.c_str(),
+                     m_vz_status});    //TODO
+            LOG(INFO) << ip << "VZ_GetFrame VzDepthFrame status:" << m_vz_status;
+        }
+    } else {
+        ret.compare_and_merge_up({FAILED, MINOR_ERROR, "Device %s VZ_GetFrameReady depth not ready: %d.", ip.c_str(), frameReady.depth});
+    }
+
+    if (1 == frameReady.ir) {
+        m_vz_status = VZ_GetFrame(iter->second.handle, VzIRFrame, &irFrame);
+        if (m_vz_status == VzRetOK && irFrame.pFrameData != nullptr) {
+            LOG(INFO) << ip << " frameIndex :" << irFrame.frameIndex;
+        } else {
+            ret.compare_and_merge_up({FAILED, MINOR_ERROR, "Device %s VZ_GetFrame VzIrFrame failed status: %d.", ip.c_str(), m_vz_status});    //TODO
+            LOG(INFO) << ip << "VZ_GetFrame VzIrFrame status:" << m_vz_status;
+        }
+    } else {
+        // Fixed: report the IR readiness flag, not the depth flag.
+        ret.compare_and_merge_up({FAILED, NORMAL, "Device %s VZ_GetFrameReady ir not ready: %d.", ip.c_str(), frameReady.ir});
+    }
+
+    return ret;
+}
+
+// Reads one depth frame from the device at `ip` and converts it into a
+// PCL point cloud, skipping invalid points reported as the origin.
+// Reconnects (with the stream started) when the device is not open.
+Error_manager DeviceTof3D::getDepthPointCloud(const std::string &ip, pcl::PointCloud<pcl::PointXYZ>::Ptr &cloud) {
+    auto iter = mp_device_info.find(ip);
+    if (iter == mp_device_info.end()) {
+        return {FAILED, NORMAL, "Device %s not in list, can\'t get point cloud.", ip.c_str()}; //TODO
+    }
+
+    if (iter->second.handle == nullptr || !iter->second.is_connect) {
+        if(ConnectDevice(ip, true) != SUCCESS) {
+            return {FAILED, NORMAL, "Open device %s failed, stop get point cloud.", ip.c_str()};
+        }
+    }
+
+    VzFrameReady frameReady = {0};
+    m_vz_status = VZ_GetFrameReady(iter->second.handle, 100, &frameReady);
+
+    if (m_vz_status != VzRetOK) {
+        LOG(WARNING) << ip << "  VZ_GetFrameReady failed status:" << m_vz_status;
+        return {}; //TODO
+    }
+
+    //Get depth frame, depth frame only output in following data mode
+    if (1 == frameReady.depth) {
+        VzFrame depthFrame = {0};
+        m_vz_status = VZ_GetFrame(iter->second.handle, VzDepthFrame, &depthFrame);
+
+        if (m_vz_status == VzRetOK && depthFrame.pFrameData != nullptr) {
+            LOG(INFO) << ip << " frameIndex :" << depthFrame.frameIndex;
+
+            VzFrame &srcFrame = depthFrame;
+            const int len = srcFrame.width * srcFrame.height;
+            // std::vector owns the buffer, fixing the original leak of
+            // `new VzVector3f[len]` when the conversion call failed.
+            std::vector<VzVector3f> worldV(len);
+
+            m_vz_status = VZ_ConvertDepthFrameToPointCloudVector(iter->second.handle, &srcFrame,
+                                                   worldV.data());
+
+            if (m_vz_status == VzRetOK) {
+                cloud->clear();
+                for (int i = 0; i < len; i++) {
+                    // Skip invalid points reported as (0,0,0).
+                    if (worldV[i].x == 0 && worldV[i].y == 0 && worldV[i].z == 0) {
+                        continue;
+                    }
+                    cloud->points.emplace_back(worldV[i].x, worldV[i].y, worldV[i].z);
+                }
+                LOG(INFO) << "Save point cloud successful to cloud: " << cloud->size();
+            }
+        } else {
+            LOG(INFO) << ip << "VZ_GetFrame VzIrFrame status:" << m_vz_status;
+        }
+    }
+
+    return {};
+}
+
+// Stub: the conversion below is entirely commented out — currently this
+// always returns an empty Error_manager without touching `cloud`.
+Error_manager DeviceTof3D::DepthFrame2PointCloud(const std::string &ip, VzFrame &depthFrame, pcl::PointCloud<pcl::PointXYZ>::Ptr &cloud) {
+//    if (depthFrame.frameType != VzDepthFrame) {
+//        return {};
+//    }
+//
+//    auto handle_iter = mp_devices_handle.find(ip);
+//    if (handle_iter == mp_devices_handle.end()) {
+//        return {}; //TODO
+//    }
+//
+//    cloud->clear();
+//    if (handle_iter->second) {
+//
+//        VzFrame &srcFrame = depthFrame;
+//        const int len = srcFrame.width * srcFrame.height;
+//        VzVector3f *worldV = new VzVector3f[len];
+//
+//        VZ_ConvertDepthFrameToPointCloudVector(handle_iter->second, &depthFrame, worldV);
+//
+//        for (int i = 0; i < len; i++) {
+//            cloud->points.emplace_back(worldV[i].x, worldV[i].y, worldV[i].z);
+//        }
+//        delete[] worldV;
+//        worldV = nullptr;
+//        LOG(INFO) << "Save point cloud successful to cloud: " << cloud->size();
+//        WriteTxtCloud(ETC_PATH"Tof3d/data/cloud.txt", cloud);
+//    }
+
+    return {};
+}
+
+Error_manager DeviceTof3D::Frame2Mat(VzFrame &frame, cv::Mat &mat) {
+    switch (frame.frameType) {
+        case VzDepthFrame:
+            return DepthFrame2Mat(frame, mat);
+        case VzIRFrame:
+            return IrFrame2Mat(frame, mat);
+        default:
+            break;
+    }
+    return {};
+}
+
+Error_manager DeviceTof3D::DepthFrame2Mat(VzFrame &frame, cv::Mat &mat) {
+    if (frame.frameType != VzDepthFrame) {
+        return {};
+    }
+
+    if (mat.type() != CV_16UC1) {
+        LOG(INFO) << "Mat type " << mat.type() << " not is " << CV_16UC1;
+        return {};
+    }
+
+    if (mat.rows != 480 || mat.cols != 640) {
+        LOG(INFO) << "Mat size (" << mat.rows << ", " << mat.cols << ") not is (480, 640)";
+        return {};
+    }
+
+    memcpy(mat.data, frame.pFrameData, frame.dataLen);
+    mat.convertTo(mat, CV_8U, 255.0 / 7495);
+    applyColorMap(mat, mat, cv::COLORMAP_RAINBOW);
+
+    return {};
+}
+
+Error_manager DeviceTof3D::IrFrame2Mat(VzFrame &frame, cv::Mat &mat) {
+    if (frame.frameType != VzIRFrame) {
+        return {};
+    }
+
+    if (mat.type() != CV_8UC1) {
+        LOG(INFO) << "Mat type " << mat.type() << " not is " << CV_8UC1;
+        return {};
+    }
+
+    if (mat.rows != 480 || mat.cols != 640) {
+        LOG(INFO) << "Mat size (" << mat.rows << ", " << mat.cols << ") not is (480, 640)";
+        return {};
+    }
+
+    memcpy(mat.data, frame.pFrameData, frame.dataLen);
+
+    return {};
+}
+
+#include <sys/stat.h>
+void DeviceTof3D::receiveFrameThread(const std::string &ip) {
+    LOG(INFO) << ip << " in thread " << std::this_thread::get_id();
+    auto iter = mp_device_info.find(ip);
+    auto t_iter = mp_thread_info.find(ip);
+    TensorrtWheelDetector detector(ETC_PATH PROJECT_NAME "/best.engine");
+
+    while (t_iter->second.condit->is_alive()) {
+        t_iter->second.condit->wait();
+        if (t_iter->second.condit->is_alive() ) {
+           Error_manager ret;
+           VzFrame depthFrame;
+           if (iter->second.handle) {
+                VzFrame depthFrame = {0};
+                cv::Mat depthMat(480, 640, CV_16UC1);
+                if(getDepthFrame(ip, depthFrame) == SUCCESS && DepthFrame2Mat(depthFrame, depthMat) == SUCCESS) {
+
+                } else {
+                    ret.compare_and_merge_up({FAILED, MINOR_ERROR, "Device %s VZ_GetFrame failed.", ip.c_str()});
+                }
+           } else {
+               ret.compare_and_merge_up({FAILED, MINOR_ERROR, "Device %s handle is null.", ip.c_str()});
+           }
+           LOG(INFO) << ret.get_error_description();
+           std::this_thread::sleep_for(std::chrono::milliseconds (1));
+        }
+    }
+
+    DisConnectDevice(ip);
+    LOG(INFO) << ip << " thread end " << std::this_thread::get_id();
+}
+
+void DeviceTof3D::HotPlugStateCallback(const VzDeviceInfo *pInfo, int status, void *contex)
+{
+    LOG(WARNING) << "uri " << status << "  " << pInfo->uri << "    " << (status == 0 ? "add" : "remove");
+    LOG(WARNING) << "alia " << status << "  " << pInfo->alias << "    " << (status == 0 ? "add" : "remove");
+
+    if (contex == nullptr) {
+        return;
+    }
+
+    tof3dVzenseInfoMap *mp = (tof3dVzenseInfoMap *)contex;
+    auto iter = mp->find(pInfo->ip);
+    LOG(INFO) << iter->first;
+    if (status == 0)
+    {
+        LOG(WARNING) << "VZ_OpenDevice " << VZ_OpenDeviceByUri(pInfo->uri, &iter->second.handle);
+        LOG(WARNING) << "VZ_StartStream " << VZ_StartStream(iter->second.handle);
+        iter->second.is_connect = true;
+        iter->second.is_start_stream = true;
+    }
+    else
+    {
+        LOG(WARNING) << "VZ_StopStream " << VZ_StopStream(iter->second.handle);
+        LOG(WARNING) << "VZ_CloseDevice " << VZ_CloseDevice(&iter->second.handle);
+        iter->second.is_connect = false;
+        iter->second.is_start_stream = false;
+    }
+}
+
+bool DeviceTof3D::drawBox(cv::Mat &mat, cv::Rect &box, cv::Scalar &color, std::string className, float confidence) {
+    // Detection box
+    cv::rectangle(mat, box, color, 2);
+    // Detection box text
+    std::string classString = className + ' ' + std::to_string(confidence).substr(0, 4);
+    cv::Size textSize = cv::getTextSize(classString, cv::FONT_HERSHEY_DUPLEX, 1, 2, 0);
+    cv::Rect textBox(box.x, box.y - 40, textSize.width + 10, textSize.height + 20);
+    cv::rectangle(mat, textBox, color, cv::FILLED);
+    cv::putText(mat, classString, cv::Point(box.x + 5, box.y - 10), cv::FONT_HERSHEY_DUPLEX, 1, cv::Scalar(0, 0, 0), 2, 0);
+    return true;
+}
+
// Push the in-memory configuration back to each enabled device.
// NOTE(review): currently a stub — the loop body is empty, so nothing is
// actually written and the function always reports success.
Error_manager DeviceTof3D::updateTof3dEtc() {
    for (auto &info : mp_device_info) {
        if (info.second.etc.enable_device()) {
            // TODO: apply info.second.etc to the device.
        }
    }
    return Error_manager();
}
+
+Error_manager DeviceTof3D::loadEtc(const DeviceTof3D::VzEtcMap &etc) {
+    for (auto &iter: etc) {
+        tof3dVzenseInfo info;
+        info.etc = iter.second;
+        mp_device_info.insert(std::pair<std::string, tof3dVzenseInfo>(iter.first, info));
+        LOG(INFO) << "Get device " << iter.first << " etc: " << iter.second.DebugString();
+    }
+    return Error_manager();
+}
+
+void DeviceTof3D::stopWorking() {
+    for (auto &info: mp_device_info) {
+        auto iter = mp_thread_info.find(info.second.etc.ip());
+        if (iter != mp_thread_info.end()) {
+            iter->second.condit->kill_all();
+            iter->second.t->join();
+            delete iter->second.t;
+            iter->second.t = nullptr;
+        }
+    }
+    mp_device_info.clear();
+    mp_thread_info.clear();
+}
+
+Error_manager DeviceTof3D::reInit(const VzEtcMap &etc) {
+    LOG(INFO) << "================= Reinit Device Tof3D.";
+    stopWorking();
+
+    Error_manager ret = Init(etc, true);
+    return ret;
+}
+
+Error_manager DeviceTof3D::setTof3dParams(const std::string &ip) {
+    auto iter = mp_device_info.find(ip);
+    if (iter == mp_device_info.end()) {
+        LOG(WARNING) << "Can\'t find " << ip << " in mp_device_info";
+        return {}; //TODO
+    }
+
+    m_vz_status = VZ_SetWorkMode(iter->second.handle, (VzWorkMode)iter->second.etc.bip().work_mode());
+    if (m_vz_status != VzReturnStatus::VzRetOK)
+    {
+        LOG(WARNING) << ip << " VZ_SetWorkMode failed status:" << m_vz_status;
+        return {};  //TODO
+    }
+
+    m_vz_status = VZ_SetIRGMMGain(iter->second.handle, iter->second.etc.bip().irgmmgain());
+    if (m_vz_status != VzReturnStatus::VzRetOK)
+    {
+        LOG(WARNING) << ip << " VZ_SetIRGMMGain failed status:" << m_vz_status;
+        return {};  //TODO
+    }
+
+    m_vz_status = VZ_SetFrameRate(iter->second.handle, iter->second.etc.bip().frame_rate());
+    if (m_vz_status != VzReturnStatus::VzRetOK)
+    {
+        LOG(WARNING) << ip << " VZ_SetFrameRate failed status:" << m_vz_status;
+        return {};  //TODO
+    }
+
+    VzExposureTimeParams exposure_param;
+    exposure_param.mode = (VzExposureControlMode)iter->second.etc.bip().enable_manual_exposure_time();
+    exposure_param.exposureTime = iter->second.etc.bip().exposure_time();
+    m_vz_status = VZ_SetExposureTime(iter->second.handle, VzToFSensor, exposure_param);
+    if (m_vz_status != VzReturnStatus::VzRetOK)
+    {
+        LOG(WARNING) << ip << " VZ_SetExposureTime failed status:" << m_vz_status;
+        return {};  //TODO
+    }
+
+    m_vz_status = VZ_SetFillHoleFilterEnabled(iter->second.handle, iter->second.etc.bip().enable_filter_fill_hole());
+    if (m_vz_status != VzReturnStatus::VzRetOK)
+    {
+        LOG(WARNING) << ip << " VZ_SetFillHoleFilterEnabled failed status:" << m_vz_status;
+        return {};  //TODO
+    }
+
+    m_vz_status = VZ_SetSpatialFilterEnabled(iter->second.handle, iter->second.etc.bip().enable_filter_spatial());
+    if (m_vz_status != VzReturnStatus::VzRetOK)
+    {
+        LOG(WARNING) << ip << " VZ_SetSpatialFilterEnabled failed status:" << m_vz_status;
+        return {};  //TODO
+    }
+
+    VzFlyingPixelFilterParams fly;
+    fly.enable = iter->second.etc.bip().enable_flying_pixel_filter();
+    fly.threshold = iter->second.etc.bip().flying_pixel_filter_value();
+    m_vz_status = VZ_SetFlyingPixelFilterParams(iter->second.handle, fly);
+    if (m_vz_status != VzReturnStatus::VzRetOK)
+    {
+        LOG(WARNING) << ip << " VZ_SetFlyingPixelFilterParams failed status:" << m_vz_status;
+        return {};  //TODO
+    }
+
+    VzConfidenceFilterParams confidence;
+    confidence.enable = iter->second.etc.bip().enable_confidence_filter();
+    confidence.threshold = iter->second.etc.bip().confidence_filter_value();
+    m_vz_status = VZ_SetConfidenceFilterParams(iter->second.handle, confidence);
+    if (m_vz_status != VzReturnStatus::VzRetOK)
+    {
+        LOG(WARNING) << ip << " VZ_SetConfidenceFilterParams failed status:" << m_vz_status;
+        return {};  //TODO
+    }
+
+    VzTimeFilterParams time_filter;
+    time_filter.enable = iter->second.etc.bip().enable_time_filter();
+    time_filter.threshold = iter->second.etc.bip().time_filter_value();
+    m_vz_status = VZ_SetTimeFilterParams(iter->second.handle, time_filter);
+    if (m_vz_status != VzReturnStatus::VzRetOK)
+    {
+        LOG(WARNING) << ip << " VZ_SetTimeFilterParams failed status:" << m_vz_status;
+        return {};  //TODO
+    }
+
+    return Error_manager();
+}
+
// Read the current sensor parameters back from the device at `ip` and store
// them into the in-memory etc (bip section). Stops at the first SDK failure;
// failures are logged and a default Error_manager is returned (TODO: real
// error codes).
Error_manager DeviceTof3D::getTof3dParams(const std::string &ip) {
    auto iter = mp_device_info.find(ip);
    if (iter == mp_device_info.end()) {
        LOG(WARNING) << "Can\'t find " << ip << " in mp_device_info";
        return {}; //TODO
    }

    // Fetch parameters one by one, mirroring each into the etc protobuf.
    VzWorkMode mode;
    m_vz_status = VZ_GetWorkMode(iter->second.handle, &mode);
    if (m_vz_status != VzReturnStatus::VzRetOK)
    {
        LOG(WARNING) << ip << " VZ_GetWorkMode failed status:" << m_vz_status;
        return {};  //TODO
    }
    iter->second.etc.mutable_bip()->set_work_mode(mode);

    uint8_t irgmmgain;
    m_vz_status = VZ_GetIRGMMGain(iter->second.handle, &irgmmgain);
    if (m_vz_status != VzReturnStatus::VzRetOK)
    {
        LOG(WARNING) << ip << " VZ_GetIRGMMGain failed status:" << m_vz_status;
        return {};  //TODO
    }
    iter->second.etc.mutable_bip()->set_irgmmgain(irgmmgain);

    int frame_rate;
    m_vz_status = VZ_GetFrameRate(iter->second.handle, &frame_rate);
    if (m_vz_status != VzReturnStatus::VzRetOK)
    {
        LOG(WARNING) << ip << " VZ_GetFrameRate failed status:" << m_vz_status;
        return {};  //TODO
    }
    iter->second.etc.mutable_bip()->set_frame_rate(frame_rate);

    // Exposure is read through the generic property interface.
    // NOTE(review): the log message says VZ_GetExposureTime but the call is
    // VZ_GetProperty("Py_ToFExposureTimeMax") — confirm this is intended.
    VzExposureTimeParams exposure_param = {VzExposureControlMode_Manual, 0};
    m_vz_status = VZ_GetProperty(iter->second.handle, "Py_ToFExposureTimeMax", &exposure_param, sizeof(exposure_param));
    if (m_vz_status != VzReturnStatus::VzRetOK)
    {
        LOG(WARNING) << ip << " VZ_GetExposureTime failed status:" << m_vz_status;
        return {};  //TODO
    }
    iter->second.etc.mutable_bip()->set_enable_manual_exposure_time(exposure_param.mode);
    iter->second.etc.mutable_bip()->set_exposure_time(exposure_param.exposureTime);

    // `boolret` is reused for both boolean filter-enable queries below.
    bool boolret;
    m_vz_status = VZ_GetFillHoleFilterEnabled(iter->second.handle, &boolret);
    if (m_vz_status != VzReturnStatus::VzRetOK)
    {
        LOG(WARNING) << ip << " VZ_GetFillHoleFilterEnabled failed status:" << m_vz_status;
        return {};  //TODO
    }
    iter->second.etc.mutable_bip()->set_enable_filter_fill_hole(boolret);

    m_vz_status = VZ_GetSpatialFilterEnabled(iter->second.handle, &boolret);
    if (m_vz_status != VzReturnStatus::VzRetOK)
    {
        LOG(WARNING) << ip << " VZ_GetSpatialFilterEnabled failed status:" << m_vz_status;
        return {};  //TODO
    }
    iter->second.etc.mutable_bip()->set_enable_filter_spatial(boolret);

    VzFlyingPixelFilterParams fly;
    m_vz_status = VZ_GetFlyingPixelFilterParams(iter->second.handle, &fly);
    if (m_vz_status != VzReturnStatus::VzRetOK)
    {
        LOG(WARNING) << ip << " VZ_GetFlyingPixelFilterParams failed status:" << m_vz_status;
        return {};  //TODO
    }
    iter->second.etc.mutable_bip()->set_enable_flying_pixel_filter(fly.enable);
    iter->second.etc.mutable_bip()->set_flying_pixel_filter_value(fly.threshold);

    VzConfidenceFilterParams confidence;
    m_vz_status = VZ_GetConfidenceFilterParams(iter->second.handle, &confidence);
    if (m_vz_status != VzReturnStatus::VzRetOK)
    {
        LOG(WARNING) << ip << " VZ_GetConfidenceFilterParams failed status:" << m_vz_status;
        return {};  //TODO
    }
    iter->second.etc.mutable_bip()->set_enable_confidence_filter(confidence.enable);
    iter->second.etc.mutable_bip()->set_confidence_filter_value(confidence.threshold);

    VzTimeFilterParams time_filter;
    m_vz_status = VZ_GetTimeFilterParams(iter->second.handle, &time_filter);
    if (m_vz_status != VzReturnStatus::VzRetOK)
    {
        LOG(WARNING) << ip << " VZ_GetTimeFilterParams failed status:" << m_vz_status;
        return {};  //TODO
    }
    iter->second.etc.mutable_bip()->set_enable_time_filter(time_filter.enable);
    iter->second.etc.mutable_bip()->set_time_filter_value(time_filter.threshold);

    return Error_manager();
}

+ 133 - 0
vzense/device_tof3d.h

@@ -0,0 +1,133 @@
#pragma once

#include <map>
#include <mutex>
#include <string>
#include <thread>
#include <vector>

#include <boost/chrono/duration.hpp>
#include <opencv2/opencv.hpp>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>

#include "error_code/error_code.hpp"
#include "VzenseNebula_api.h"
#include "log/log.h"
#include "thread/thread_condition.h"

#include "tof3d_config.pb.h"
#include "detect/wheel-detector.h"

+
+class DeviceTof3D {
+public:
+    struct tof3dVzenseInfo {
+        VzDeviceInfo info;
+        tof3dVzenseEtc etc;
+        VzDeviceHandle handle = nullptr;
+        bool is_connect = false;
+        bool is_start_stream = false;
+    };
+
+    using VzEtcMap = std::map<std::string, tof3dVzenseEtc>;
+    using tof3dVzenseInfoMap = std::map<std::string, tof3dVzenseInfo>;
+public:
+    static DeviceTof3D* iter() {
+        static DeviceTof3D* instance = nullptr;
+        if (instance == nullptr) {
+            instance = new DeviceTof3D();
+        }
+        return instance;
+    }
+
+    ~DeviceTof3D() = default;
+
+    // TODO
+    void run();
+
+    Error_manager Init(const VzEtcMap &tp_tof3d, bool search= true);
+
+    Error_manager reInit(const VzEtcMap &etc);
+
+    Error_manager SearchDevice(const double &time);
+
+    Error_manager ConnectDevice(const std::string &ip, bool open_stream=true);
+
+    Error_manager ConnectAllDevices(bool open_stream=true);
+
+    Error_manager ConnectAllEtcDevices(bool open_stream=true);
+
+    Error_manager DisConnectDevice(const std::string &ip);
+
+    Error_manager DisConnectAllEtcDevices();
+
+    Error_manager DisConnectAllDevices();
+
+    Error_manager getDepthFrame(const std::string &ip, VzFrame &depthFrame);
+
+    Error_manager getIrFrame(const std::string &ip, VzFrame &irFrame);
+
+    Error_manager getDepthAndIrPicture(const std::string &ip, VzFrame &depthFrame, VzFrame &irFrame);
+
+    Error_manager getDepthPointCloud(const std::string &ip, pcl::PointCloud<pcl::PointXYZ>::Ptr &cloud);
+
+    Error_manager DepthFrame2PointCloud(const std::string &ip, VzFrame &depthFrame, pcl::PointCloud<pcl::PointXYZ>::Ptr &cloud);
+
+    Error_manager updateTof3dEtc();
+
+    Error_manager setTof3dParams(const std::string &ip);
+
+    Error_manager getTof3dParams(const std::string &ip);
+
+    static Error_manager Frame2Mat(VzFrame &frame, cv::Mat &mat);
+
+protected:
+    DeviceTof3D() = default;
+
+    static Error_manager DepthFrame2Mat(VzFrame &frame, cv::Mat &mat);
+
+    static Error_manager IrFrame2Mat(VzFrame &frame, cv::Mat &mat);
+
+    void receiveFrameThread(const std::string &ip);
+
+    void detectThread(const std::string &ip);
+
+    static void HotPlugStateCallback(const VzDeviceInfo *pInfo, int status, void *contex);
+
+    bool drawBox(cv::Mat &mat, cv::Rect &box, cv::Scalar &color, std::string className, float confidence);
+
+    Error_manager loadEtc(const VzEtcMap &etc);
+
+    void stopWorking();
+
+    // 传入深度图,得到车轮在mat中的坐标
+    Error_manager segmentTensorRT(cv::Mat &depth_mat, std::vector<cv::Point> &wheel_cv_cloud);
+
+    // 根据传入坐标,分离depth frame的数据点
+    Error_manager segFrame2CarAndWheel(VzFrame &depth_frame, std::vector<cv::Point> &wheel_cv_cloud, pcl::PointCloud<pcl::PointXYZ>::Ptr &car_cloud, pcl::PointCloud<pcl::PointXYZ>::Ptr &wheel_cloud);
+
+    // 车轮点云优化
+    Error_manager WheelCloudOptimize(pcl::PointCloud<pcl::PointXYZ>::Ptr &wheel_cloud);
+
+    // 车身点云优化
+    Error_manager CarPoseOptimize(pcl::PointCloud<pcl::PointXYZ>::Ptr &car_cloud);
+private:
+    struct ThreadInfo {
+        std::thread * t;
+        Thread_condition * condit;
+
+        ThreadInfo(std::thread *m_t, Thread_condition * m_condit): t(m_t), condit(m_condit) {}
+    };
+    VzReturnStatus m_vz_status = VzRetOthers;
+
+    tof3dVzenseInfoMap mp_device_info;
+    std::map<std::string, ThreadInfo> mp_thread_info;
+
+    VzFrame lf_frame;
+    std::mutex lf_frame_mutex;                                              // 测量结果互斥锁
+    VzFrame rf_frame;
+    std::mutex rf_frame_mutex;                                              // 测量结果互斥锁
+    VzFrame lr_frame;
+    std::mutex lr_frame_mutex;                                              // 测量结果互斥锁
+    VzFrame rr_frame;
+    std::mutex rr_frame_mutex;                                              // 测量结果互斥锁
+
+};
+
+