main.cpp

#include "../common/common.hpp"
#include "TYImageProc.h"
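
// MAP_DEPTH_TO_COLOR selects the registration direction: 1 projects the depth
// image into the color camera's coordinate frame, 0 (this sample's default)
// projects the color image into the depth frame.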
#define MAP_DEPTH_TO_COLOR 0
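
// Context shared with the frame and event callbacks.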
struct CallbackData {
    int                  index;
    TY_ISP_HANDLE        IspHandle;
    TY_DEV_HANDLE        hDevice;
    DepthRender*         render;
    DepthViewer*         depthViewer;
    bool                 needUndistort;
    float                scale_unit;
    bool                 isTof;
    TY_CAMERA_CALIB_INFO depth_calib;
    TY_CAMERA_CALIB_INFO color_calib;
};

cv::Mat tofundis_mapx, tofundis_mapy;
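
// Undistort the color image if requested, then register depth and color into a
// common coordinate frame in the direction selected by map_depth_to_color.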
static void doRegister(const TY_CAMERA_CALIB_INFO& depth_calib
                       , const TY_CAMERA_CALIB_INFO& color_calib
                       , const cv::Mat& depth
                       , const float f_scale_unit
                       , const cv::Mat& color
                       , bool needUndistort
                       , cv::Mat& undistort_color
                       , cv::Mat& out
                       , bool map_depth_to_color
                       )
{
    int32_t image_size;
    TY_PIXEL_FORMAT color_fmt;
    if(color.type() == CV_16U) {
        image_size = color.size().area() * 2;
        color_fmt = TY_PIXEL_FORMAT_MONO16;
    }
    else if(color.type() == CV_16UC3) {
        image_size = color.size().area() * 6;
        color_fmt = TY_PIXEL_FORMAT_RGB48;
    }
    else {
        image_size = color.size().area() * 3;
        color_fmt = TY_PIXEL_FORMAT_RGB;
    }

    // do undistortion
    if (needUndistort) {
        if(color_fmt == TY_PIXEL_FORMAT_MONO16)
            undistort_color = cv::Mat(color.size(), CV_16U);
        else if(color_fmt == TY_PIXEL_FORMAT_RGB48)
            undistort_color = cv::Mat(color.size(), CV_16UC3);
        else
            undistort_color = cv::Mat(color.size(), CV_8UC3);

        TY_IMAGE_DATA src;
        src.width = color.cols;
        src.height = color.rows;
        src.size = image_size;
        src.pixelFormat = color_fmt;
        src.buffer = color.data;

        TY_IMAGE_DATA dst;
        dst.width = color.cols;
        dst.height = color.rows;
        dst.size = image_size;
        dst.pixelFormat = color_fmt;
        dst.buffer = undistort_color.data;
        ASSERT_OK(TYUndistortImage(&color_calib, &src, NULL, &dst));
    }
    else {
        undistort_color = color;
    }

    // do register
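    // When map_depth_to_color is set, each depth pixel is projected into the
    // color image; otherwise each color pixel is projected into the depth image.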
    if (map_depth_to_color) {
        out = cv::Mat::zeros(undistort_color.size(), CV_16U);
        ASSERT_OK(
            TYMapDepthImageToColorCoordinate(
                &depth_calib,
                depth.cols, depth.rows, depth.ptr<uint16_t>(),
                &color_calib,
                out.cols, out.rows, out.ptr<uint16_t>(), f_scale_unit
            )
        );
        cv::Mat temp;
        // You may want to use a median filter to fill holes in the projected
        // depth image, or do something else here.
        cv::medianBlur(out, temp, 5);
        out = temp;
    }
    else {
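        // Map the (possibly undistorted) color image into the depth coordinate
        // frame, picking the mapping API that matches the color pixel format.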
        if(color_fmt == TY_PIXEL_FORMAT_MONO16) {
            out = cv::Mat::zeros(depth.size(), CV_16U);
            ASSERT_OK(
                TYMapMono16ImageToDepthCoordinate(
                    &depth_calib,
                    depth.cols, depth.rows, depth.ptr<uint16_t>(),
                    &color_calib,
                    undistort_color.cols, undistort_color.rows, undistort_color.ptr<uint16_t>(),
                    out.ptr<uint16_t>(), f_scale_unit
                )
            );
        }
        else if(color_fmt == TY_PIXEL_FORMAT_RGB48) {
            out = cv::Mat::zeros(depth.size(), CV_16UC3);
            ASSERT_OK(
                TYMapRGB48ImageToDepthCoordinate(
                    &depth_calib,
                    depth.cols, depth.rows, depth.ptr<uint16_t>(),
                    &color_calib,
                    undistort_color.cols, undistort_color.rows, undistort_color.ptr<uint16_t>(),
                    out.ptr<uint16_t>(), f_scale_unit
                )
            );
        }
        else {
            out = cv::Mat::zeros(depth.size(), CV_8UC3);
            ASSERT_OK(
                TYMapRGBImageToDepthCoordinate(
                    &depth_calib,
                    depth.cols, depth.rows, depth.ptr<uint16_t>(),
                    &color_calib,
                    undistort_color.cols, undistort_color.rows, undistort_color.ptr<uint8_t>(),
                    out.ptr<uint8_t>(), f_scale_unit
                )
            );
        }
    }
}
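
// Per-frame processing: parse depth and color, undistort ToF depth, register the
// two streams, and show a blended RGBD preview.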
void handleFrame(TY_FRAME_DATA* frame, void* userdata)
{
    CallbackData* pData = (CallbackData*)userdata;
    LOGD("=== Get frame %d", ++pData->index);

    cv::Mat depth, color;
    parseFrame(*frame, &depth, 0, 0, &color, pData->IspHandle);

    if (!depth.empty()) {
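        // For ToF devices this sample undistorts the raw depth in software with
        // the depth calibration before it is displayed or registered.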
        if (pData->isTof) {
            TY_IMAGE_DATA src;
            src.width = depth.cols;
            src.height = depth.rows;
            src.size = depth.size().area() * 2;
            src.pixelFormat = TY_PIXEL_FORMAT_DEPTH16;
            src.buffer = depth.data;

            cv::Mat undistort_depth = cv::Mat(depth.size(), CV_16U);
            TY_IMAGE_DATA dst;
            dst.width = depth.cols;
            dst.height = depth.rows;
            dst.size = undistort_depth.size().area() * 2;
            dst.buffer = undistort_depth.data;
            dst.pixelFormat = TY_PIXEL_FORMAT_DEPTH16;
            ASSERT_OK(TYUndistortImage(&pData->depth_calib, &src, NULL, &dst));
            depth = undistort_depth.clone();
        }
        pData->depthViewer->show(depth);
    }
    if (!color.empty()) {
        cv::imshow("color", color);
    }

    if (!depth.empty() && !color.empty()) {
        cv::Mat undistort_color, out;
        if (pData->needUndistort || MAP_DEPTH_TO_COLOR) {
            doRegister(pData->depth_calib, pData->color_calib, depth, pData->scale_unit, color,
                       pData->needUndistort, undistort_color, out, MAP_DEPTH_TO_COLOR);
        }
        else {
            undistort_color = color;
            out = color;
        }
        cv::imshow("undistort color", undistort_color);

        cv::Mat tmp, gray8, bgr;
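        // For display, 16-bit color is normalized down to 8 bits and the rendered
        // depth is blended 50/50 with the color image to form an RGBD preview.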
        if (MAP_DEPTH_TO_COLOR) {
            cv::Mat depthDisplay = pData->render->Compute(out);
            if(undistort_color.type() == CV_16U) {
                gray8 = cv::Mat(undistort_color.size(), CV_8U);
                cv::normalize(undistort_color, tmp, 0, 255, cv::NORM_MINMAX);
                cv::convertScaleAbs(tmp, gray8);
                cv::cvtColor(gray8, undistort_color, cv::COLOR_GRAY2BGR);
            } else if(undistort_color.type() == CV_16UC3) {
                bgr = cv::Mat(undistort_color.size(), CV_8UC3);
                cv::normalize(undistort_color, tmp, 0, 255, cv::NORM_MINMAX);
                cv::convertScaleAbs(tmp, bgr);
                undistort_color = bgr.clone();
            }
            depthDisplay = depthDisplay / 2 + undistort_color / 2;
            cv::imshow("depth2color RGBD", depthDisplay);
        }
        else {
            cv::imshow("mapped RGB", out);
            if(out.type() == CV_16U) {
                gray8 = cv::Mat(out.size(), CV_8U);
                cv::normalize(out, tmp, 0, 255, cv::NORM_MINMAX);
                cv::convertScaleAbs(tmp, gray8);
                cv::cvtColor(gray8, out, cv::COLOR_GRAY2BGR);
            } else if(out.type() == CV_16UC3) {
                bgr = cv::Mat(out.size(), CV_8UC3);
                cv::normalize(out, tmp, 0, 255, cv::NORM_MINMAX);
                cv::convertScaleAbs(tmp, bgr);
                out = bgr.clone();
            }
            cv::Mat depthDisplay = pData->render->Compute(depth);
            depthDisplay = depthDisplay / 2 + out / 2;
            cv::imshow("color2depth RGBD", depthDisplay);
        }
    }

    LOGD("=== Re-enqueue buffer(%p, %d)", frame->userBuffer, frame->bufferSize);
    ASSERT_OK(TYEnqueueBuffer(pData->hDevice, frame->userBuffer, frame->bufferSize));
}

void eventCallback(TY_EVENT_INFO *event_info, void *userdata)
{
    if (event_info->eventId == TY_EVENT_DEVICE_OFFLINE) {
        LOGD("=== Event Callback: Device Offline!");
        // Note:
        // Please set the TY_BOOL_KEEP_ALIVE_ONOFF feature to false if you need to debug with breakpoints!
    }
    else if (event_info->eventId == TY_EVENT_LICENSE_ERROR) {
        LOGD("=== Event Callback: License Error!");
    }
}
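
// Entry point: parse -id/-ip, open the device, enable the depth and RGB
// components, read calibration data, then fetch and process frames until 'q'
// is pressed.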
int main(int argc, char* argv[])
{
    std::string ID, IP;
    TY_INTERFACE_HANDLE hIface = NULL;
    TY_DEV_HANDLE hDevice = NULL;

    for(int i = 1; i < argc; i++){
        if(strcmp(argv[i], "-id") == 0){
            ID = argv[++i];
        } else if(strcmp(argv[i], "-ip") == 0) {
            IP = argv[++i];
        } else if(strcmp(argv[i], "-h") == 0){
            LOGI("Usage: SimpleView_Registration [-h] [-id <ID>] [-ip <IP>]");
            return 0;
        }
    }

    LOGD("=== Init lib");
    ASSERT_OK( TYInitLib() );
    TY_VERSION_INFO ver;
    ASSERT_OK( TYLibVersion(&ver) );
    LOGD(" - lib version: %d.%d.%d", ver.major, ver.minor, ver.patch);

    std::vector<TY_DEVICE_BASE_INFO> selected;
    ASSERT_OK( selectDevice(TY_INTERFACE_ALL, ID, IP, 1, selected) );
    ASSERT(selected.size() > 0);
    TY_DEVICE_BASE_INFO& selectedDev = selected[0];

    ASSERT_OK( TYOpenInterface(selectedDev.iface.id, &hIface) );
    ASSERT_OK( TYOpenDevice(hIface, selectedDev.id, &hDevice) );

    TY_COMPONENT_ID allComps;
    ASSERT_OK( TYGetComponentIDs(hDevice, &allComps) );
    if(!(allComps & TY_COMPONENT_RGB_CAM)){
        LOGE("=== Has no RGB camera, can't do registration");
        return -1;
    }

    TY_ISP_HANDLE isp_handle;
    ASSERT_OK(TYISPCreate(&isp_handle));
    ASSERT_OK(ColorIspInitSetting(isp_handle, hDevice));
    // You can enable the auto exposure function as follows, but the frame rate may drop.
    // The device may also stall for 1~2 seconds while the software adjusts the exposure time.
#if 0
    ASSERT_OK(ColorIspInitAutoExposure(isp_handle, hDevice));
#endif

    LOGD("=== Configure components");
    TY_COMPONENT_ID componentIDs = TY_COMPONENT_DEPTH_CAM | TY_COMPONENT_RGB_CAM;
    ASSERT_OK( TYEnableComponents(hDevice, componentIDs) );
    // ASSERT_OK( TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_YUYV_640x480) );

    bool hasUndistortSwitch, hasDistortionCoef;
    ASSERT_OK( TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_UNDISTORTION, &hasUndistortSwitch) );
    ASSERT_OK( TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_DISTORTION, &hasDistortionCoef) );
    if (hasUndistortSwitch) {
        ASSERT_OK( TYSetBool(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_UNDISTORTION, true) );
    }

    LOGD("=== Prepare image buffer");
    uint32_t frameSize;
    ASSERT_OK( TYGetFrameBufferSize(hDevice, &frameSize) );
    LOGD(" - Get size of framebuffer, %d", frameSize);

    LOGD(" - Allocate & enqueue buffers");
    char* frameBuffer[2];
    frameBuffer[0] = new char[frameSize];
    frameBuffer[1] = new char[frameSize];
    LOGD(" - Enqueue buffer (%p, %d)", frameBuffer[0], frameSize);
    ASSERT_OK( TYEnqueueBuffer(hDevice, frameBuffer[0], frameSize) );
    LOGD(" - Enqueue buffer (%p, %d)", frameBuffer[1], frameSize);
    ASSERT_OK( TYEnqueueBuffer(hDevice, frameBuffer[1], frameSize) );

    LOGD("=== Register event callback");
    ASSERT_OK(TYRegisterEventCallback(hDevice, eventCallback, NULL));

    bool hasTriggerParam = false;
    ASSERT_OK( TYHasFeature(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, &hasTriggerParam) );
    if (hasTriggerParam) {
        LOGD("=== Disable trigger mode");
        TY_TRIGGER_PARAM trigger;
        trigger.mode = TY_TRIGGER_MODE_OFF;
        ASSERT_OK(TYSetStruct(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, &trigger, sizeof(trigger)));
    }
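
    // Display helpers and the context handed to handleFrame().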
    DepthViewer depthViewer("Depth");
    DepthRender render;
    CallbackData cb_data;
    cb_data.index = 0;
    cb_data.hDevice = hDevice;
    cb_data.depthViewer = &depthViewer;
    cb_data.render = &render;
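    // Software undistortion is only needed when the device has no hardware
    // undistortion switch but does report distortion coefficients.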
    cb_data.needUndistort = !hasUndistortSwitch && hasDistortionCoef;
    cb_data.IspHandle = isp_handle;
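
    // Raw depth values are scaled by TY_FLOAT_SCALE_UNIT (typically millimeters
    // per unit); keep the default of 1.0 if the query fails.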
    float scale_unit = 1.;
    TYGetFloat(hDevice, TY_COMPONENT_DEPTH_CAM, TY_FLOAT_SCALE_UNIT, &scale_unit);
    cb_data.scale_unit = scale_unit;
    depthViewer.depth_scale_unit = scale_unit;

    LOGD("=== Read depth calib info");
    ASSERT_OK( TYGetStruct(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_CALIB_DATA
                           , &cb_data.depth_calib, sizeof(cb_data.depth_calib)) );

    LOGD("=== Read color calib info");
    ASSERT_OK( TYGetStruct(hDevice, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_CALIB_DATA
                           , &cb_data.color_calib, sizeof(cb_data.color_calib)) );
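
    // Heuristic used by this sample: a depth camera that exposes distortion
    // coefficients is treated as a ToF device whose depth needs software undistortion.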
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_DISTORTION, &cb_data.isTof));

    LOGD("=== Start capture");
    ASSERT_OK( TYStartCapture(hDevice) );

    LOGD("=== Wait for callback");
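    // Fetch the next frame, process it, push pending ISP updates, and poll the
    // keyboard; 'q' exits the loop.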
    bool exit_main = false;
    while(!exit_main){
        TY_FRAME_DATA frame;
        int err = TYFetchFrame(hDevice, &frame, -1);
        if( err != TY_STATUS_OK ) {
            LOGE("Fetch frame error %d: %s", err, TYErrorString(err));
            break;
        }
        handleFrame(&frame, &cb_data);
        TYISPUpdateDevice(cb_data.IspHandle);

        int key = cv::waitKey(1);
        switch(key & 0xff){
        case 0xff:
            break;
        case 'q':
            exit_main = true;
            break;
        default:
            LOGD("Pressed key %d", key);
        }
    }

    ASSERT_OK( TYStopCapture(hDevice) );
    ASSERT_OK( TYCloseDevice(hDevice) );
    ASSERT_OK( TYCloseInterface(hIface) );
    ASSERT_OK( TYDeinitLib() );
    // Buffers were allocated with new[], so release them with delete[].
    delete [] frameBuffer[0];
    delete [] frameBuffer[1];

    LOGD("=== Main done!");
    return 0;
}