main.cpp 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358
#include <cassert>
#include <cmath>
#include <cstdio>
#include <limits>
#include "../common/common.hpp"
#include "../common/cloud_viewer/cloud_viewer.hpp"
#include "TYImageProc.h"
// Compile-time default for depth->color mapping; the runtime choice actually
// comes from the -dep2rgb command-line flag (see main).
#define MAP_DEPTH_TO_COLOR 1

// State shared between the main thread, the frame-fetch thread and the
// GL key callback. A single global instance (cb_data) is used.
struct CallbackData {
    int index;                         // running frame counter (logging only)
    TY_DEV_HANDLE hDevice;             // open device handle
    TY_ISP_HANDLE isp_handle;          // ISP pipeline used to decode RGB frames
    TY_CAMERA_CALIB_INFO depth_calib;  // depth camera calibration data
    TY_CAMERA_CALIB_INFO color_calib;  // color camera calibration data
    float f_depth_scale;               // depth unit scale; 1.0 when the device has no TY_FLOAT_SCALE_UNIT
    bool saveOneFramePoint3d;          // set by the 's' key; next frame is dumped to an .xyz file
    bool exit_main;                    // asks the fetch thread to stop
    int fileIndex;                     // suffix of the next saved point-cloud file
    bool map_depth_to_color;           // true => reproject depth into the color frame (-dep2rgb)
};
CallbackData cb_data;
TY_ISP_HANDLE isp_handle = NULL;       // global ISP handle; also stored into cb_data
bool isTof = false;                    // true if the depth cam reports distortion data (needs undistortion)
cv::Mat tofundis_mapx, tofundis_mapy;  // NOTE(review): declared for TOF undistortion maps but unused in this file
  24. //////////////////////////////////////////////////
  25. static void doRegister(const TY_CAMERA_CALIB_INFO& depth_calib
  26. , const TY_CAMERA_CALIB_INFO& color_calib
  27. , const cv::Mat& depth
  28. , const float f_scale_unit
  29. , const cv::Mat& color
  30. , cv::Mat& undistort_color
  31. , cv::Mat& out
  32. , bool map_depth_to_color
  33. )
  34. {
  35. // do undistortion
  36. TY_IMAGE_DATA src;
  37. src.width = color.cols;
  38. src.height = color.rows;
  39. src.size = color.size().area() * 3;
  40. src.pixelFormat = TY_PIXEL_FORMAT_RGB;
  41. src.buffer = color.data;
  42. undistort_color = cv::Mat(color.size(), CV_8UC3);
  43. TY_IMAGE_DATA dst;
  44. dst.width = color.cols;
  45. dst.height = color.rows;
  46. dst.size = undistort_color.size().area() * 3;
  47. dst.buffer = undistort_color.data;
  48. dst.pixelFormat = TY_PIXEL_FORMAT_RGB;
  49. ASSERT_OK(TYUndistortImage(&color_calib, &src, NULL, &dst));
  50. // do register
  51. if (map_depth_to_color) {
  52. out = cv::Mat::zeros(undistort_color.size(), CV_16U);
  53. ASSERT_OK(
  54. TYMapDepthImageToColorCoordinate(
  55. &depth_calib,
  56. depth.cols, depth.rows, depth.ptr<uint16_t>(),
  57. &color_calib,
  58. out.cols, out.rows, out.ptr<uint16_t>(), f_scale_unit
  59. )
  60. );
  61. cv::Mat temp;
  62. //you may want to use median filter to fill holes in projected depth image
  63. //or do something else here
  64. cv::medianBlur(out, temp, 5);
  65. out = temp;
  66. }
  67. else {
  68. out = cv::Mat::zeros(depth.size(), CV_8UC3);
  69. ASSERT_OK(
  70. TYMapRGBImageToDepthCoordinate(
  71. &depth_calib,
  72. depth.cols, depth.rows, depth.ptr<uint16_t>(),
  73. &color_calib,
  74. undistort_color.cols, undistort_color.rows, undistort_color.ptr<uint8_t>(),
  75. out.ptr<uint8_t>(), f_scale_unit
  76. )
  77. );
  78. undistort_color = out.clone();
  79. }
  80. }
  81. static void handleFrame(TY_FRAME_DATA* frame, void* userdata) {
  82. //we only using Opencv Mat as data container.
  83. //you can allocate memory by yourself.
  84. CallbackData* pData = (CallbackData*) userdata;
  85. LOGD("=== Get frame %d", ++pData->index);
  86. cv::Mat depth, color;
  87. parseFrame(*frame, &depth, NULL, NULL, &color, isp_handle);
  88. if(!depth.empty()){
  89. if (isTof)
  90. {
  91. TY_IMAGE_DATA src;
  92. src.width = depth.cols;
  93. src.height = depth.rows;
  94. src.size = depth.size().area() * 2;
  95. src.pixelFormat = TY_PIXEL_FORMAT_DEPTH16;
  96. src.buffer = depth.data;
  97. cv::Mat undistort_depth = cv::Mat(depth.size(), CV_16U);
  98. TY_IMAGE_DATA dst;
  99. dst.width = depth.cols;
  100. dst.height = depth.rows;
  101. dst.size = undistort_depth.size().area() * 2;
  102. dst.buffer = undistort_depth.data;
  103. dst.pixelFormat = TY_PIXEL_FORMAT_DEPTH16;
  104. ASSERT_OK(TYUndistortImage(&cb_data.depth_calib, &src, NULL, &dst));
  105. depth = undistort_depth.clone();
  106. }
  107. std::vector<TY_VECT_3F> p3d;
  108. uint8_t *color_data = NULL;
  109. cv::Mat color_data_mat, out;
  110. if (!color.empty()){
  111. bool hasColorCalib = false;
  112. ASSERT_OK(TYHasFeature(pData->hDevice, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_CALIB_DATA, &hasColorCalib));
  113. if (hasColorCalib)
  114. {
  115. doRegister(pData->depth_calib, pData->color_calib, depth, pData->f_depth_scale, color, color_data_mat, out, pData->map_depth_to_color);
  116. cv::cvtColor(color_data_mat, color_data_mat, cv::COLOR_BGR2RGB);
  117. color_data = color_data_mat.ptr<uint8_t>();
  118. }
  119. }
  120. if (pData->map_depth_to_color) {
  121. depth = out.clone();
  122. p3d.resize(depth.size().area());
  123. ASSERT_OK(TYMapDepthImageToPoint3d(&pData->color_calib, depth.cols, depth.rows
  124. , (uint16_t*)depth.data, &p3d[0]));
  125. }
  126. else
  127. {
  128. p3d.resize(depth.size().area());
  129. ASSERT_OK(TYMapDepthImageToPoint3d(&pData->depth_calib, depth.cols, depth.rows
  130. , (uint16_t*)depth.data, &p3d[0], pData->f_depth_scale));
  131. }
  132. if (pData->saveOneFramePoint3d){
  133. char file[32];
  134. sprintf(file, "points-%d.xyz", pData->fileIndex++);
  135. writePointCloud((cv::Point3f*)&p3d[0], (const cv::Vec3b*)color_data_mat.data, p3d.size(), file, PC_FILE_FORMAT_XYZ);
  136. pData->saveOneFramePoint3d = false;
  137. }
  138. for (int idx = 0; idx < p3d.size(); idx++){//we adjust coordinate for display
  139. p3d[idx].y = -p3d[idx].y;
  140. p3d[idx].z = -p3d[idx].z;
  141. }
  142. GLPointCloudViewer::Update(p3d.size(), &p3d[0], color_data);
  143. }
  144. }
  145. void eventCallback(TY_EVENT_INFO *event_info, void *userdata)
  146. {
  147. if (event_info->eventId == TY_EVENT_DEVICE_OFFLINE) {
  148. LOGD("=== Event Callback: Device Offline!");
  149. // Note:
  150. // Please set TY_BOOL_KEEP_ALIVE_ONOFF feature to false if you need to debug with breakpoint!
  151. }
  152. else if (event_info->eventId == TY_EVENT_LICENSE_ERROR) {
  153. LOGD("=== Event Callback: License Error!");
  154. }
  155. }
  156. static int FetchOneFrame(CallbackData &cb){
  157. TY_FRAME_DATA frame;
  158. int err = TYFetchFrame(cb.hDevice, &frame, -1);
  159. if (err != TY_STATUS_OK){
  160. LOGD("... Drop one frame");
  161. return -1;
  162. }
  163. handleFrame(&frame, &cb);
  164. LOGD("=== Re-enqueue buffer(%p, %d)", frame.userBuffer, frame.bufferSize);
  165. TYEnqueueBuffer(cb.hDevice, frame.userBuffer, frame.bufferSize);
  166. TYISPUpdateDevice(cb.isp_handle);
  167. return 0;
  168. }
  169. void* FetchFrameThreadFunc(void* d){
  170. CallbackData &cb = *(CallbackData*)d;
  171. while(!cb.exit_main){
  172. if (FetchOneFrame(cb) != 0){
  173. break;
  174. }
  175. }
  176. return NULL;
  177. }
  178. bool key_pressed(int key){
  179. if (key == 's'){
  180. cb_data.saveOneFramePoint3d = true;
  181. return true;
  182. }
  183. return false;
  184. }
  185. int main(int argc, char* argv[])
  186. {
  187. GLPointCloudViewer::GlInit();
  188. std::string ID, IP;
  189. TY_INTERFACE_HANDLE hIface = NULL;
  190. TY_DEV_HANDLE hDevice = NULL;
  191. bool with_color_cam = true;
  192. bool dep2rgb = false;
  193. for(int i = 1; i < argc; i++){
  194. if(strcmp(argv[i], "-id") == 0){
  195. ID = argv[++i];
  196. } else if(strcmp(argv[i], "-ip") == 0) {
  197. IP = argv[++i];
  198. } else if(strcmp(argv[i], "-dep2rgb") == 0) {
  199. dep2rgb = true;
  200. }else if(strcmp(argv[i], "-h") == 0){
  201. printf("Usage: SimpleView_Point3D [-h] [-id <ID>] [-color=off] [-dep2rgb]");
  202. return 0;
  203. }
  204. else if (strcmp(argv[i], "-color=off") == 0){
  205. with_color_cam = false;
  206. }
  207. }
  208. LOGD("=== Init lib");
  209. ASSERT_OK( TYInitLib() );
  210. TY_VERSION_INFO ver;
  211. ASSERT_OK( TYLibVersion(&ver) );
  212. LOGD(" - lib version: %d.%d.%d", ver.major, ver.minor, ver.patch);
  213. std::vector<TY_DEVICE_BASE_INFO> selected;
  214. ASSERT_OK( selectDevice(TY_INTERFACE_ALL, ID, IP, 1, selected) );
  215. ASSERT(selected.size() > 0);
  216. TY_DEVICE_BASE_INFO& selectedDev = selected[0];
  217. ASSERT_OK( TYOpenInterface(selectedDev.iface.id, &hIface) );
  218. ASSERT_OK( TYOpenDevice(hIface, selectedDev.id, &hDevice) );
  219. LOGD("=== Configure components, open depth cam");
  220. int32_t componentIDs = TY_COMPONENT_DEPTH_CAM;
  221. ASSERT_OK( TYEnableComponents(hDevice, componentIDs) );
  222. //try to enable depth map
  223. LOGD("Configure components, open depth cam");
  224. if (componentIDs & TY_COMPONENT_DEPTH_CAM) {
  225. TY_IMAGE_MODE image_mode;
  226. ASSERT_OK(get_default_image_mode(hDevice, TY_COMPONENT_DEPTH_CAM, image_mode));
  227. LOGD("Select Depth Image Mode: %dx%d", TYImageWidth(image_mode), TYImageHeight(image_mode));
  228. ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, image_mode));
  229. ASSERT_OK(TYEnableComponents(hDevice, TY_COMPONENT_DEPTH_CAM));
  230. float scale_unit = 1.;
  231. bool hasScaleUint = false;
  232. //Incase some model Desc has No ScaleUint Now(Tof), Then Suppose it is 1.0f
  233. TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_FLOAT_SCALE_UNIT, &hasScaleUint);
  234. if(hasScaleUint) {
  235. TYGetFloat(hDevice, TY_COMPONENT_DEPTH_CAM, TY_FLOAT_SCALE_UNIT, &scale_unit);
  236. }
  237. cb_data.f_depth_scale = scale_unit;
  238. }
  239. TY_COMPONENT_ID allComps;
  240. ASSERT_OK(TYGetComponentIDs(hDevice, &allComps));
  241. if ((allComps & TY_COMPONENT_RGB_CAM) && (with_color_cam)){
  242. LOGD("=== Has internal RGB camera, try to open it");
  243. ASSERT_OK(TYEnableComponents(hDevice, TY_COMPONENT_RGB_CAM));
  244. bool hasColorCalib = false;
  245. ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_CALIB_DATA, &hasColorCalib));
  246. if (hasColorCalib)
  247. {
  248. ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_CALIB_DATA
  249. , &cb_data.color_calib, sizeof(cb_data.color_calib)));
  250. }
  251. ASSERT_OK(TYISPCreate(&isp_handle)); //create a default isp handle for bayer rgb images
  252. cb_data.isp_handle = isp_handle;
  253. ASSERT_OK(ColorIspInitSetting(isp_handle, hDevice));
  254. //You can turn on auto exposure function as follow ,but frame rate may reduce .
  255. //Device also may be casually stucked 1~2 seconds when software trying to adjust device exposure time value
  256. #if 0
  257. ASSERT_OK(ColorIspInitAutoExposure(isp_handle, hDevice));
  258. #endif
  259. }
  260. LOGD("=== Prepare image buffer");
  261. uint32_t frameSize;
  262. ASSERT_OK( TYGetFrameBufferSize(hDevice, &frameSize) );
  263. LOGD(" - Get size of framebuffer, %d", frameSize);
  264. LOGD(" - Allocate & enqueue buffers");
  265. char* frameBuffer[2];
  266. frameBuffer[0] = new char[frameSize];
  267. frameBuffer[1] = new char[frameSize];
  268. LOGD(" - Enqueue buffer (%p, %d)", frameBuffer[0], frameSize);
  269. ASSERT_OK( TYEnqueueBuffer(hDevice, frameBuffer[0], frameSize) );
  270. LOGD(" - Enqueue buffer (%p, %d)", frameBuffer[1], frameSize);
  271. ASSERT_OK( TYEnqueueBuffer(hDevice, frameBuffer[1], frameSize) );
  272. LOGD("=== Read depth intrinsic");
  273. ASSERT_OK( TYGetStruct(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_CALIB_DATA
  274. , &cb_data.depth_calib, sizeof(cb_data.depth_calib)));
  275. ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_DISTORTION, &isTof));
  276. LOGD("=== Register event callback");
  277. ASSERT_OK(TYRegisterEventCallback(hDevice, eventCallback, NULL));
  278. bool hasTrigger = false;
  279. ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, &hasTrigger));
  280. if (hasTrigger) {
  281. LOGD("=== Disable trigger mode");
  282. TY_TRIGGER_PARAM trigger;
  283. trigger.mode = TY_TRIGGER_MODE_OFF;
  284. ASSERT_OK(TYSetStruct(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, &trigger, sizeof(trigger)));
  285. }
  286. LOGD("=== Start capture");
  287. ASSERT_OK( TYStartCapture(hDevice) );
  288. cb_data.index = 0;
  289. cb_data.hDevice = hDevice;
  290. cb_data.saveOneFramePoint3d = false;
  291. cb_data.fileIndex = 0;
  292. cb_data.exit_main = false;
  293. cb_data.map_depth_to_color = dep2rgb;
  294. //start a thread to fetch image data
  295. TYThread fetch_thread;
  296. fetch_thread.create(FetchFrameThreadFunc, &cb_data);
  297. LOGD("=== While loop to fetch frame");
  298. FetchOneFrame(cb_data);
  299. GLPointCloudViewer::ResetViewTranslate();//init view position by first frame
  300. GLPointCloudViewer::RegisterKeyCallback(key_pressed);//key pressed callback
  301. GLPointCloudViewer::EnterMainLoop();//start main window
  302. cb_data.exit_main = true;//wait work thread to exit
  303. fetch_thread.destroy();
  304. ASSERT_OK( TYStopCapture(hDevice) );
  305. ASSERT_OK( TYCloseDevice(hDevice) );
  306. ASSERT_OK( TYCloseInterface(hIface) );
  307. ASSERT_OK( TYDeinitLib() );
  308. delete frameBuffer[0];
  309. delete frameBuffer[1];
  310. if (isp_handle){
  311. TYISPRelease(&isp_handle);
  312. }
  313. LOGD("=== Main done!");
  314. GLPointCloudViewer::Deinit();
  315. return 0;
  316. }