
1. Add gRPC interfaces;
2. Add validity checks on the recognition results;
3. Add modification of the 6-DoF (six-axis) transformation data;

LiuZe 10 months ago
parent
commit
613cb5714e

+ 34 - 0
communication/grpc/streamServer.cpp

@@ -241,6 +241,40 @@ CustomServer::CloseMeasureDataStream(::grpc::ServerContext *context, const ::Jet
     return img;
 }
 
+grpc::Status CustomServer::setTofTransformation(::grpc::ServerContext *context, const ::JetStream::RequestCmd *request,
+                                                ::JetStream::Response *response) {
+    CoordinateTransformation3D params;
+    TofTransformationManager::iter()->get((DeviceAzimuth)request->id(), params);
+    if (request->params().has_x()) {
+        params.set_x(request->params().x());
+    }
+    if (request->params().has_y()) {
+        params.set_y(request->params().y());
+    }
+    if (request->params().has_z()) {
+        params.set_z(request->params().z());
+    }
+    if (request->params().has_pitch()) {
+        params.set_pitch(request->params().pitch());
+    }
+    if (request->params().has_roll()) {
+        params.set_roll(request->params().roll());
+    }
+    if (request->params().has_yaw()) {
+        params.set_yaw(request->params().yaw());
+    }
+    TofTransformationManager::iter()->set((DeviceAzimuth)request->id(), params);
+
+    return grpc::Status();
+}
+
+grpc::Status CustomServer::saveTofTransformation(::grpc::ServerContext *context, const ::JetStream::RequestCmd *request,
+                                                 ::JetStream::Response *response) {
+    TofTransformationManager::iter()->save();
+    return grpc::Status();
+}
+
+
 
 StreamRpcServer::StreamRpcServer() {
     thread_ = nullptr;

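Note: a minimal client-side sketch of how the two new RPCs might be exercised. The stub calls follow the standard gRPC C++ codegen for the setTofTransformation / saveTofTransformation methods added to def.grpc.proto below; the channel address, device id and the adjustYaw helper are placeholders, not part of this commit.

    #include <grpcpp/grpcpp.h>
    #include "proto/def.grpc.grpc.pb.h"

    // Hypothetical helper: nudge the yaw of one TOF device, then persist the result.
    void adjustYaw(int device_id, float yaw_rad) {
        auto channel = grpc::CreateChannel("127.0.0.1:50051", grpc::InsecureChannelCredentials());
        auto stub = JetStream::StreamServer::NewStub(channel);

        JetStream::RequestCmd request;
        request.set_id(device_id);                  // azimuth id of the device to adjust
        request.mutable_params()->set_yaw(yaw_rad); // only the fields that are set get applied

        JetStream::Response response;
        grpc::ClientContext set_ctx;
        if (!stub->setTofTransformation(&set_ctx, request, &response).ok()) return;

        grpc::ClientContext save_ctx;               // a fresh context per call
        stub->saveTofTransformation(&save_ctx, request, &response);
    }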
+ 6 - 0
communication/grpc/streamServer.h

@@ -8,6 +8,7 @@
 #include <grpcpp/grpcpp.h>
 #include "proto/def.grpc.grpc.pb.h"
 #include "proto/def.grpc.pb.h"
+#include "vzense/tof3d_transformation.hpp"
 
 class CustomServer : public JetStream::StreamServer::Service {
 private:
@@ -40,6 +41,11 @@ public:
     virtual ::grpc::Status GetImage(::grpc::ServerContext *context, const ::JetStream::RequestCmd *request,
                                     ::JetStream::ResImage *response);
 
+    virtual ::grpc::Status setTofTransformation(::grpc::ServerContext *context, const ::JetStream::RequestCmd *request,
+                                                ::JetStream::Response *response);
+
+    virtual ::grpc::Status saveTofTransformation(::grpc::ServerContext *context, const ::JetStream::RequestCmd *request,
+                                                 ::JetStream::Response *response);
 protected:
     static ::JetStream::Image cvMat2JetImage(cv::Mat &image);
 };

+ 8 - 8
communication/mqtt/mqtt_communication.cpp

@@ -22,7 +22,7 @@ int Tof3DMqttAsyncClient::CloudDataArrived(void *client, char *topicName, int to
         seg_results.ParseFromArray(message->payload, message->payloadlen);
         LabelYoloDataBuffer::iter()->lock_exe(&LabelYoloDataBuffer::set, seg_results);
         int lable = TimeTool::timeDropoutLL(TimeTool::TemporalPrecision::TemporalPrecisionMs, TimeTool::TemporalPrecision::TemporalPrecisionHour);
-        LOG(INFO) << "after " << (lable - seg_results.label()) << "ms";
+        // LOG(INFO) << "after " << (lable - seg_results.label()) << "ms";
     }
     return 1;
 }
@@ -49,21 +49,21 @@ void Tof3DMqttAsyncClient::run() {
                 MatDataBuffer::iter()->lock_exe(&MatDataBuffer::set, device_index, lable, devices_info[device_index].pointMat);
                 devices_info[device_index].irMat.copyTo(merge_mat(cv::Rect((device_index & 0x1) * 640,
                                                                            ((device_index & 0x2) >> 1) * 480, 640, 480)));
-                MatDataBuffer::iter()->lock_exe(&MatDataBuffer::setIr, lable, merge_mat);
             }
+            MatDataBuffer::iter()->lock_exe(&MatDataBuffer::setIr, lable, merge_mat);
             // cv::imwrite(ETC_PATH PROJECT_NAME "/failed/merge_mat_" + std::to_string(lable) + ".jpg", merge_mat);
             JetStream::LabelImage testimage;
-            NetMessageTrans::lableMat2Proto(lable, merge_mat, testimage, false);
+            NetMessageTrans::lableMat2Proto(lable, merge_mat, testimage, true);
             int size_1 = testimage.ByteSizeLong();
             unsigned char data[size_1];
             testimage.SerializeToArray((void *)data, size_1);
             SendMessage("tof/ir", data, size_1);
 
-            int test_lable = 0;
-            cv::Mat pic;
-            NetMessageTrans::Proto2lableMat(testimage, test_lable, pic);
-            cv::imshow("pic", pic);
-            cv::waitKey(1);
+            // int test_lable = 0;
+            // cv::Mat pic;
+            // NetMessageTrans::Proto2lableMat(testimage, test_lable, pic);
+            // cv::imshow("pic", pic);
+            // cv::waitKey(1);
         }
     }
 }

+ 29 - 25
communication/rabbitmq/rabbitmq_communication.cpp

@@ -18,36 +18,37 @@ Error_manager RabbitmqCommunicationTof3D::check_executer(Rabbitmq_message *p_msg
 // Handle the message; subclasses need to override this
 Error_manager RabbitmqCommunicationTof3D::execute_msg(Rabbitmq_message *p_msg) {
     Error_manager ret;
+//    LOG(INFO) << p_msg->m_routing_key;
+//    if (p_msg->m_routing_key == "dispatch_31_statu_port") {
+//        dispatch_node_statu t_dispatch_node_statu;
+//        if(google::protobuf::TextFormat::ParseFromString(p_msg->m_message_buf, &t_dispatch_node_statu))
+//        {
+//            LOG(INFO) << t_dispatch_node_statu.DebugString();
+//            // TODO: algorithm flow control on door open/close
+////            usleep(5 * 1000 * 1000);
+////            DetectManager::iter()->Stop();
+////            DeviceTof3D::iter()->Stop();
+////
+////            usleep(5 * 1000 * 1000);
+////            DeviceTof3D::iter()->Continue();
+////            DetectManager::iter()->Continue();
+//        }
+//    }
+//
+//    // When the terminal requests a parking operation, the parking form is sent to the check node; forward it to the sensing node here as well.
+//    if (p_msg->m_routing_key == "user_park_command_request_port") {
+//        park_table t_park_table_msg; // parking form
+//        if(google::protobuf::TextFormat::ParseFromString(p_msg->m_message_buf, &t_park_table_msg))
+//        {
+//            LOG(INFO) << t_park_table_msg.DebugString();
+//            // TODO: fetch the data from the algorithm and save it
+//        }
+//    }
     return ret;
 }
 
 // Handle the message; subclasses need to override this
 Error_manager RabbitmqCommunicationTof3D::execute_time_consume_msg(Rabbitmq_message *p_msg) {
-    if (p_msg->m_routing_key.find("dispatch_") != std::string::npos &&
-        p_msg->m_routing_key.find("_statu_port") != std::string::npos) {
-        dispatch_node_statu t_dispatch_node_statu;
-        if(google::protobuf::TextFormat::ParseFromString(p_msg->m_message_buf, &t_dispatch_node_statu))
-        {
-            // TODO: algorithm flow control on door open/close
-//            usleep(5 * 1000 * 1000);
-//            DetectManager::iter()->Stop();
-//            DeviceTof3D::iter()->Stop();
-//
-//            usleep(5 * 1000 * 1000);
-//            DeviceTof3D::iter()->Continue();
-//            DetectManager::iter()->Continue();
-        }
-    }
-
-    // When the terminal requests a parking operation, the parking form is sent to the check node; forward it to the sensing node here as well.
-    if (p_msg->m_routing_key == "user_park_command_request_port") {
-        park_table t_park_table_msg; // parking form
-        if(google::protobuf::TextFormat::ParseFromString(p_msg->m_message_buf, &t_park_table_msg))
-        {
-            // TODO: fetch the data from the algorithm and save it
-        }
-    }
-
     return {};
 }
 
@@ -67,6 +68,9 @@ Error_manager RabbitmqCommunicationTof3D::auto_encapsulate_status() {
     }
     MeasureStatu measure_status = DetectManager::iter()->getMeasureResult(detect_measure_info);
     if (measure_status != MeasureStatu::Measure_OK) {
+        if (measure_status == MeasureStatu::Measure_Border) {
+            rabbit_measure_info.mutable_measure_info_to_plc_forward()->set_border_statu(Range_status::Range_back);
+        }
         rabbit_measure_info.mutable_measure_info_to_plc_forward()->set_ground_status(measure_status);
     }
 

+ 6 - 6
communication/transitData.cpp

@@ -8,21 +8,21 @@ TransitData::TransitData() {
 }
 
 void TransitData::setImage(cv::Mat image, int id) {
-    if (id < 0 || id >= NUM_) return;
+    if (id < 0 || id >= DeviceAzimuth::DEVICE_AZIMUTH_MAX) return;
     mutex_.lock();
     image_[id] = image.clone();
     mutex_.unlock();
 }
 
 void TransitData::setImageCL(cv::Mat &image, int id) {
-    if (id < 0 || id >= NUM_) return;
+    if (id < 0 || id >= DeviceAzimuth::DEVICE_AZIMUTH_MAX) return;
     mutex_.lock();
     imageCL_[id] = image.clone();
     mutex_.unlock();
 }
 
 void TransitData::setMask(std::vector<cv::Point> mask, int id) {
-    if (id < 0 || id >= NUM_) return;
+    if (id < 0 || id >= DeviceAzimuth::DEVICE_AZIMUTH_MAX) return;
     mutex_.lock();
     mask_[id] = mask;
     mutex_.unlock();
@@ -35,7 +35,7 @@ void TransitData::setMeasureInfo(MeasureInfo info) {
 }
 
 bool TransitData::getImage(cv::Mat &image, int id, float timeout) {
-    if (id < 0 || id >= NUM_) return false;
+    if (id < 0 || id >= DeviceAzimuth::DEVICE_AZIMUTH_MAX) return false;
     if (image_[id].timeout(timeout))
         return false;
     mutex_.lock();
@@ -45,7 +45,7 @@ bool TransitData::getImage(cv::Mat &image, int id, float timeout) {
 }
 
 bool TransitData::getImageCL(cv::Mat &image, int id, float timeout) {
-    if (id < 0 || id >= NUM_) return false;
+    if (id < 0 || id >= DeviceAzimuth::DEVICE_AZIMUTH_MAX) return false;
     if (imageCL_[id].timeout(timeout))
         return false;
     mutex_.lock();
@@ -55,7 +55,7 @@ bool TransitData::getImageCL(cv::Mat &image, int id, float timeout) {
 }
 
 bool TransitData::getMask(std::vector<cv::Point> &mask, int id, float timeout) {
-    if (id < 0 || id >= NUM_) return false;
+    if (id < 0 || id >= DeviceAzimuth::DEVICE_AZIMUTH_MAX) return false;
     if (mask_[id].timeout(timeout))
         return false;
     mask = mask_[id]();

+ 4 - 5
communication/transitData.h

@@ -10,10 +10,9 @@
 #include <pcl/point_cloud.h>
 #include "tool/timedlockdata.hpp"
 #include "tool/singleton.hpp"
+#include "proto/enum_type.pb.h"
 #include <mutex>
 
-#define NUM_ 4
-
 class TransitData : public Singleton<TransitData> {
     friend Singleton<TransitData>;
 public:
@@ -68,10 +67,10 @@ private:
     TransitData();
 
     std::mutex mutex_;
-    TimedLockData<cv::Mat> image_[NUM_];
-    TimedLockData<cv::Mat> imageCL_[NUM_];
+    TimedLockData<cv::Mat> image_[DeviceAzimuth::DEVICE_AZIMUTH_MAX];
+    TimedLockData<cv::Mat> imageCL_[DeviceAzimuth::DEVICE_AZIMUTH_MAX];
 
-    TimedLockData<std::vector<cv::Point>> mask_[NUM_];
+    TimedLockData<std::vector<cv::Point>> mask_[DeviceAzimuth::DEVICE_AZIMUTH_MAX];
     TimedLockData<MeasureInfo> measureInfo_;
 
 };

+ 1 - 24
detect/data_buf_lable.hpp

@@ -2,34 +2,11 @@
 #include <iostream>
 #include <map>
 #include <chrono>
-#include <thread>
-#include <mutex>
 #include <opencv2/opencv.hpp>
 
 #include "proto/def.grpc.pb.h"
+#include "tool/lock_class.hpp"
 
-template <class Type>
-class lockFunc {
-public:
-    template<class Func, class ... Args>
-    int lock_exe(Func func, Args&&... args) {
-        lock.lock();
-        bool ret = ((Type* )this->*func)(args...);
-        lock.unlock();
-        return ret;
-    }
-
-    template<class Func, class ... Args>
-    int lock_exe(Func func, Type* p, Args&&... args) {
-        lock.lock();
-        bool ret = (p->*func)(args...);
-        lock.unlock();
-        return ret;
-    }
-
-private:
-    std::mutex lock;
-};
 
 class MatDataBuffer : public lockFunc<MatDataBuffer>{
 public:

+ 109 - 56
detect/detect_manager.cpp

@@ -6,7 +6,8 @@
 
 Error_manager DetectManager::Init(const google::protobuf::RepeatedPtrField<tof3dVzenseEtc> &tof_devices) {
     for (int device_index = 0; device_index < DeviceAzimuth::DEVICE_AZIMUTH_MAX; device_index++) {
-        updateTrans((DeviceAzimuth)device_index, tof_devices[device_index].trans());
+        TofTransformationManager::iter()->set((DeviceAzimuth)device_index, tof_devices[device_index].trans());
+        updateTrans((DeviceAzimuth) device_index, tof_devices[device_index].trans());
     }
 
     m_thread = new std::thread(&DetectManager::run, this);
@@ -31,13 +32,63 @@ Error_manager DetectManager::updateTrans(const DeviceAzimuth &azimuth, const Coo
     move << trans.x(), trans.y(), trans.z();
     rotaition.block<3, 3>(0, 0) = rotation_matrix3;
     rotaition.col(3) << move;
-    LOG(INFO) << "device " << azimuth << " update rotation info: \n" << rotaition;
     rotation_matrix4[azimuth] = rotaition;
     return {};
 }
 
 #include "tool/time.hpp"
 
+bool boxIsValid(const JetStream::SegBox &box, int min, bool reverse = false) {
+    if (box.confidence() < 0.8) {
+        return false;
+    }
+    int left_side = box.x();
+    if (reverse) {
+        left_side = 640 - (box.x() + box.width());
+    }
+
+    return left_side > min;
+}
+
+int IsInImageValidRegion(const JetStream::LabelYolo &labels) {
+    if (labels.boxes_size() != 4) {
+        return -1; // no detection boxes
+    }
+
+    for (int device_index = 0; device_index < 4; device_index++) {
+        // LOG(INFO) << device_index << ": "<< labels.boxes(device_index).confidence() << " " << labels.boxes(device_index).x();
+    }
+
+    JetStream::SegBox box2 = labels.boxes(1);
+    JetStream::SegBox box4 = labels.boxes(3);
+    bool frontExist = labels.boxes(0).confidence() > 0.8 || labels.boxes(1).confidence() > 0.8;
+    bool backExist = labels.boxes(2).confidence() > 0.8 || labels.boxes(3).confidence() > 0.8;
+    if (!frontExist) {
+        if (backExist) {  // rear wheels detected, front missing
+            return 1;  // move forward
+        } else {  // neither front nor rear detected
+            return -1;
+        }
+    } else {
+        if (backExist) {
+            bool front_OK = boxIsValid(labels.boxes(1), 20) || boxIsValid(labels.boxes(0), 20, true);
+            bool back_OK = boxIsValid(labels.boxes(2), 20, true) || boxIsValid(labels.boxes(3), 20);
+            if (front_OK && back_OK) {
+                return 0;
+            } else {
+                return 1;
+            }
+        } else {  // front wheels detected, rear missing
+            bool front_OK = boxIsValid(labels.boxes(1), 320) || boxIsValid(labels.boxes(0), 320, true);
+            if (!front_OK) {
+                return 1;
+            } else {
+                return -1;
+            }
+        }
+    }
+}
+
 void DetectManager::run() {
     LOG(INFO) << "detect thread running in " << std::this_thread::get_id();
 
@@ -49,7 +100,6 @@ void DetectManager::run() {
 
     auto t_start_time = std::chrono::steady_clock::now();
     std::chrono::duration<double> cost = std::chrono::steady_clock::now() - t_start_time;
-    RecordImages record_images;
 
     while (m_condit.is_alive()) {
         m_condit.wait();
@@ -68,46 +118,18 @@ void DetectManager::run() {
             if (!LabelYoloDataBuffer::iter()->lock_exe(&LabelYoloDataBuffer::get, seg_results, 10)) {
                 continue;
             }
-            int lable = TimeTool::timeDropoutLL(TimeTool::TemporalPrecision::TemporalPrecisionMs, TimeTool::TemporalPrecision::TemporalPrecisionHour);
-            LOG(INFO) << "after " << (lable - seg_results.label()) << "ms";
             // Check how many results were detected and whether images need to be saved
             int seg_num = 0;
             for (int device_index = 0; device_index < DeviceAzimuth::DEVICE_AZIMUTH_MAX; device_index++) {
+                // LOG(INFO) << device_index << ": " << seg_results.boxes(device_index).confidence();
                 if (seg_results.boxes(device_index).confidence() > 0.7) {
                     seg_num++;
                 }
             }
-            if (seg_num == 1 && record_images.name_1.empty()) {
-                if (MatDataBuffer::iter()->lock_exe(&MatDataBuffer::getIr, seg_results.label(), record_images.img_1, 10)) {
-                    record_images.name_1 = TimeTool::timeDropoutStr();
-                } else {
-                    record_images.name_1.clear();
-                }
-
-            }
-            if (seg_num == 2 && record_images.name_2.empty()) {
-                if (MatDataBuffer::iter()->lock_exe(&MatDataBuffer::getIr, seg_results.label(), record_images.img_2, 10)) {
-                    record_images.name_2 = TimeTool::timeDropoutStr();
-                } else {
-                    record_images.name_2.clear();
-                }
-
-            }
-            if (seg_num == 3 && record_images.name_3.empty()) {
-                if (MatDataBuffer::iter()->lock_exe(&MatDataBuffer::getIr, seg_results.label(), record_images.img_3, 10)) {
-                    record_images.name_3 = TimeTool::timeDropoutStr();
-                } else {
-                    record_images.name_3.clear();
-                }
-
-            }
-            if (seg_num == 4 && record_images.name_4.empty()) {
-                if (MatDataBuffer::iter()->lock_exe(&MatDataBuffer::getIr, seg_results.label(), record_images.img_4, 10)) {
-                    record_images.name_4 = TimeTool::timeDropoutStr();
-                } else {
-                    record_images.name_4.clear();
-                }
-
+            if (seg_num == 0) {
+                detect_statu = MeasureStatu::Measure_Empty_Clouds;
+                m_detect_result_record.reset();
+                continue;
             }
             // Take the high-confidence detection results
             for (int device_index = 0; device_index < DeviceAzimuth::DEVICE_AZIMUTH_MAX; device_index++) {
@@ -117,21 +139,39 @@ void DetectManager::run() {
                 }
                 cv::Mat pointMat;
                 //LOG(INFO) << "b get data" << " empty";
-                if (MatDataBuffer::iter()->lock_exe(&MatDataBuffer::get, device_index, seg_results.label(), pointMat, 10)) {
-                    cv::imshow(std::to_string(device_index), pointMat);
-                    cv::waitKey(1);
+                if (MatDataBuffer::iter()->lock_exe(&MatDataBuffer::get, device_index, seg_results.label(), pointMat,
+                                                    10)) {
                     have_cloud.push_back(device_index);
+                    std::vector<cv::Point> seg_points;
                     for (int row = 0; row < 480; row++) {
-                        for (int col = seg_results.boxes(device_index).lines(row).begin(); col < seg_results.boxes(device_index).lines(row).end(); col++) {
+                        for (int col = seg_results.boxes(device_index).lines(row).begin();
+                             col < seg_results.boxes(device_index).lines(row).end(); col++) {
+                            seg_points.emplace_back(col, row);
                             vct_wheel_cloud[device_index]->emplace_back(pointMat.at<cv::Vec3f>(row, col)[0] * 0.001,
                                                                         pointMat.at<cv::Vec3f>(row, col)[1] * 0.001,
                                                                         pointMat.at<cv::Vec3f>(row, col)[2] * 0.001);
                         }
                     }
-                    pcl::transformPointCloud(*vct_wheel_cloud[device_index], *vct_wheel_cloud[device_index], rotation_matrix4[device_index]());
+                    TransitData::get_instance_pointer()->setMask(seg_points, device_index);
+                    CoordinateTransformation3D device_trans;
+                    // Update the rotation transform
+                    if (TofTransformationManager::iter()->get((DeviceAzimuth)device_index, device_trans)) {
+                        updateTrans((DeviceAzimuth)device_index, device_trans);
+                    }
+                    pcl::transformPointCloud(*vct_wheel_cloud[device_index], *vct_wheel_cloud[device_index],
+                                             rotation_matrix4[device_index]());
                     // Point3D_tool::WriteTxtCloud(ETC_PATH PROJECT_NAME "/data/cloud_" + std::to_string(device_index) + ".txt", vct_wheel_cloud[device_index]);
                 }
             }
+            int flag = IsInImageValidRegion(seg_results);
+            if (flag == 1) {
+                LOG(INFO) << "向前";
+                detect_statu = MeasureStatu::Measure_Border;
+                continue;
+            } else if (flag == -1){
+                LOG(WARNING) << "缺少轮子";
+                continue;
+            }
 
             // 点云处理
             if (!have_cloud.empty()) {
@@ -152,13 +192,19 @@ void DetectManager::run() {
 
                 // 根据车轮数采用不同的优化方式
                 if (m_detect_result_record.car.wheel_width < 1.0 || m_detect_result_record.car.wheel_base < 2.0 ||
-                        have_cloud.size() > 2) {
-                    ceres_detect_ret = wheel_detect->detect_2Step(vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_LF], vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_RF],
-                                    vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_LR], vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_RR],
-                                    detect_result.car.wheels_center_x, detect_result.car.wheels_center_y,
-                                    detect_result.car.theta, detect_result.car.wheel_base,
-                                    detect_result.car.wheel_width,detect_result.car.front_wheels_theta, detect_result.car.loss);
-                    if ((m_detect_result_record.car.wheel_width < 1.0 && m_detect_result_record.car.wheel_base < 1.0 && have_cloud.size() == 2) || have_cloud.size() < 2) {
+                    have_cloud.size() > 2) {
+                    ceres_detect_ret = wheel_detect->detect_2Step(vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_LF],
+                                                                  vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_RF],
+                                                                  vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_LR],
+                                                                  vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_RR],
+                                                                  detect_result.car.wheels_center_x,
+                                                                  detect_result.car.wheels_center_y,
+                                                                  detect_result.car.theta, detect_result.car.wheel_base,
+                                                                  detect_result.car.wheel_width,
+                                                                  detect_result.car.front_wheels_theta,
+                                                                  detect_result.car.loss);
+                    if ((m_detect_result_record.car.wheel_width < 1.0 && m_detect_result_record.car.wheel_base < 1.0 &&
+                         have_cloud.size() == 2) || have_cloud.size() < 2) {
                         detect_result.car.wheel_width = m_detect_result_record.car.wheel_width;
                         detect_result.car.wheel_base = m_detect_result_record.car.wheel_base;
                     } else if (have_cloud.size() == 2) {
@@ -168,18 +214,24 @@ void DetectManager::run() {
                             }
                         }
 
-                        if (have_cloud[0] % 2 == have_cloud[1] %2 ) {
+                        if (have_cloud[0] % 2 == have_cloud[1] % 2) {
                             detect_result.car.wheel_width = m_detect_result_record.car.wheel_width;
                         } else {
                             detect_result.car.wheel_base = m_detect_result_record.car.wheel_base;
                         }
                     }
                 } else {
-                    ceres_detect_ret = wheel_detect->detect_dynP(vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_LF], vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_RF],
-                                         vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_LR], vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_RR],
-                                         detect_result.car.wheels_center_x, detect_result.car.wheels_center_y,
-                                         detect_result.car.theta, m_detect_result_record.car.wheel_base,
-                                         m_detect_result_record.car.wheel_width,detect_result.car.front_wheels_theta, detect_result.car.loss);
+                    ceres_detect_ret = wheel_detect->detect_dynP(vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_LF],
+                                                                 vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_RF],
+                                                                 vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_LR],
+                                                                 vct_wheel_cloud[DeviceAzimuth::DEVICE_AZIMUTH_RR],
+                                                                 detect_result.car.wheels_center_x,
+                                                                 detect_result.car.wheels_center_y,
+                                                                 detect_result.car.theta,
+                                                                 m_detect_result_record.car.wheel_base,
+                                                                 m_detect_result_record.car.wheel_width,
+                                                                 detect_result.car.front_wheels_theta,
+                                                                 detect_result.car.loss);
                     detect_result.car.wheel_base = m_detect_result_record.car.wheel_base;
                     detect_result.car.wheel_width = m_detect_result_record.car.wheel_width;
                 }
@@ -221,13 +273,14 @@ void DetectManager::stop() {
     m_thread = nullptr;
 }
 
-Error_manager DetectManager::WheelCloudOptimize(std::vector<pcl::PointCloud<pcl::PointXYZ>::Ptr> &vct_wheel_cloud, DetectResult &result) {
+Error_manager DetectManager::WheelCloudOptimize(std::vector<pcl::PointCloud<pcl::PointXYZ>::Ptr> &vct_wheel_cloud,
+                                                DetectResult &result) {
     // Downsample
     pcl::VoxelGrid<pcl::PointXYZ> vox;                     // create the filter object
     vox.setLeafSize(0.03f, 0.03f, 0.03f);     // voxel volume used for filtering: a 3 cm cube
     pcl::PointCloud<pcl::PointXYZ>::Ptr filtered_cloud(new pcl::PointCloud<pcl::PointXYZ>);
     for (int device_index = 0; device_index < DeviceAzimuth::DEVICE_AZIMUTH_MAX; device_index++) {
-        if (vct_wheel_cloud[device_index]->empty()) { continue;}
+        if (vct_wheel_cloud[device_index]->empty()) { continue; }
 
         vox.setInputCloud(vct_wheel_cloud[device_index]);                   // hand the cloud to be filtered to the filter object
         vox.filter(*vct_wheel_cloud[device_index]);                         // run the filter and store the output

+ 20 - 3
proto/def.grpc.proto

@@ -1,10 +1,21 @@
 syntax = "proto3";
 package JetStream;
+
+message CoordinateTransformation3D {
+  optional float x = 1;
+  optional float y = 2;
+  optional float z = 3;
+  optional float roll = 4;
+  optional float pitch = 5;
+  optional float yaw = 6;
+}
+
 message RequestCmd
 {
   int32 Id=1;  // For fetching data from the camera with the given id: 1, 2, 3, 4; any other value means all
-  // For opening the video stream
+  optional CoordinateTransformation3D params = 2;
 }
+
 message Response{
   string info=2;
 }
@@ -28,8 +39,12 @@ message Line{
   int32 end=2;
 }
 message SegBox{
-  float confidence = 1;
-  repeated Line lines= 2;
+  int32 x = 1;
+  int32 y = 2;
+  int32 width = 3;
+  int32 height = 4;
+  float confidence = 5;
+  repeated Line lines= 6;
 }
 message LabelYolo{
   int32 label=1;
@@ -75,6 +90,8 @@ service StreamServer{
   rpc CloseMeasureDataStream(RequestCmd) returns(Response){}
   rpc GetCloud(RequestCmd) returns(ResCloud){}
   rpc GetImage(RequestCmd) returns(ResImage){}
+  rpc setTofTransformation(RequestCmd) returns(Response){}
+  rpc saveTofTransformation(RequestCmd) returns(Response){}
 }
 message Boundary{
   float radius=1; // turntable radius

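Note: the new params field and CoordinateTransformation3D use proto3 optional, so each axis carries explicit presence and a RequestCmd can describe a partial update. A short sketch of the semantics this enables follows; makePitchOnlyUpdate is a hypothetical helper, not part of this commit.

    #include "proto/def.grpc.pb.h"

    // Only the axes explicitly set on the request are merged by setTofTransformation;
    // the remaining components of the stored transform stay untouched.
    JetStream::RequestCmd makePitchOnlyUpdate(int device_id, float pitch_rad) {
        JetStream::RequestCmd cmd;
        cmd.set_id(device_id);
        cmd.mutable_params()->set_pitch(pitch_rad);
        // cmd.params().has_pitch() is now true; has_x(), has_yaw(), ... remain false.
        return cmd;
    }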
+ 33 - 2
vzense/device_tof3d.cpp

@@ -532,9 +532,10 @@ void DeviceTof3D::run(const DeviceAzimuth &azimuth) {
 
     auto t_start_time = std::chrono::steady_clock::now();
     std::chrono::duration<double> cost = std::chrono::steady_clock::now() - t_start_time;
+    TofTransformationManager::iter()->set(azimuth, device.etc->trans());
 
     LOG(INFO) << device.etc->ip() << ": " << azimuth << " in thread " << std::this_thread::get_id();
-    updateTrans(azimuth, device.etc->trans());
+    CoordinateTransformation3D device_trans;
     while (device.condit->is_alive()) {
         device.condit->wait();
         cost = std::chrono::steady_clock::now() - t_start_time;
@@ -545,7 +546,11 @@ void DeviceTof3D::run(const DeviceAzimuth &azimuth) {
             Error_manager ret;
             DeviceTof3DSaveInfo depthInfo;
             Error_manager vz_ret = {FAILED, NORMAL};
-
+            // Update the rotation transform if the 6-DoF parameters changed
+            if (TofTransformationManager::iter()->get(azimuth, device_trans)) {
+                updateTrans(azimuth, device_trans);
+            }
+            // Capture depth and IR frames from the camera
             device.handle_mutex->lock();
             if (device.handle) {
                 vz_ret = getDepthAndIrPicture(device.handle, depthFrame, irFrame);
@@ -563,6 +568,32 @@ void DeviceTof3D::run(const DeviceAzimuth &azimuth) {
                 Frame2Mat(irFrame, depthInfo.irMat);
                 memcpy(depthInfo.pointMat.data, worldV, 480 * 640 * sizeof(VzVector3f));
                 m_device_mat[azimuth] = depthInfo;
+                // Transform the point cloud
+                t_ir_img = depthInfo.irMat.clone();
+                t_point_img = cv::Mat::zeros(480, 640, CV_32FC4);
+                for (int cols = 0; cols < depthInfo.pointMat.cols - 0; cols++) {
+                    for (int rows = 0; rows < depthInfo.pointMat.rows - 0; rows++) {
+                        int i = rows * depthInfo.pointMat.cols + cols;
+                        if (sqrt(worldV[i].x * worldV[i].x + worldV[i].y * worldV[i].y + worldV[i].z * worldV[i].z) *
+                            0.001f > 5) {
+                            continue;
+                        }
+                        Eigen::Vector3f trans;
+                        trans << worldV[i].x * 0.001f, worldV[i].y * 0.001f, worldV[i].z * 0.001f;
+                        trans = rotation_matrix3[azimuth] * trans + move[azimuth];
+                        if (trans(2) < 0.03 || fabs(trans(0)) > 1.5 || fabs(trans(1)) > 2.7 || trans(2) > 1) {
+                            // continue;
+                        }
+                        t_point_img.at<cv::Vec4f>(rows, cols)[0] = trans(0);
+                        t_point_img.at<cv::Vec4f>(rows, cols)[1] = trans(1);
+                        t_point_img.at<cv::Vec4f>(rows, cols)[2] = trans(2);
+                    }
+                }
+                
+                // Store the image and the point cloud
+                TransitData::get_instance_pointer()->setImage(t_ir_img, azimuth);
+                TransitData::get_instance_pointer()->setImageCL(t_point_img, azimuth);
+                
             }
         } else {
             LOG_EVERY_N(WARNING, 1000) << device.etc->ip() << " thread was killed.";

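Note: the loop above maps every camera point with rotation_matrix3[azimuth] and move[azimuth]. Below is a sketch of how the six CoordinateTransformation3D values could be turned into that rotation/translation pair; the yaw-pitch-roll (Z * Y * X) composition order is an assumption, the authoritative convention is whatever updateTrans implements, and RigidTransform / makeTransform are hypothetical names.

    #include <Eigen/Dense>

    struct RigidTransform {
        Eigen::Matrix3f rotation;     // plays the role of rotation_matrix3[azimuth]
        Eigen::Vector3f translation;  // plays the role of move[azimuth]
    };

    // Build a rigid transform from x/y/z (metres) and roll/pitch/yaw (radians).
    RigidTransform makeTransform(float x, float y, float z,
                                 float roll, float pitch, float yaw) {
        Eigen::Matrix3f rotation =
            (Eigen::AngleAxisf(yaw,   Eigen::Vector3f::UnitZ()) *
             Eigen::AngleAxisf(pitch, Eigen::Vector3f::UnitY()) *
             Eigen::AngleAxisf(roll,  Eigen::Vector3f::UnitX())).toRotationMatrix();
        return {rotation, Eigen::Vector3f(x, y, z)};
    }

    // A camera-frame point p (in metres) then maps to the shared frame as:
    //     p' = rotation * p + translation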
+ 1 - 0
vzense/device_tof3d.h

@@ -20,6 +20,7 @@
 #include "tool/error_code.hpp"
 #include "tool/timedlockdata.hpp"
 #include "tool/point3D_tool.hpp"
+#include "tof3d_transformation.hpp"
 
 class DeviceTof3D {
 public: