// main.cpp

#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <string>
#include <iostream>
#include <sstream>
#include <ctime>
#include <cstring>
#include <cstdlib>
#include <cstdio>
#include <zconf.h>
#include <csignal>
#include <thread>
#include "MvGmslCamera.h"
#include <fstream>
#include <chrono>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/time.h>
#include <linux/videodev2.h>

// V4L2 output device that receives the composed mosaic frames.
#define VIDEO_OUT "/dev/video8"

using namespace std::chrono;
using std::string;

volatile sig_atomic_t exitRequested = 0;
uint camera_num = 6;
struct sync_out_a_cfg_client_t stCameraCfgSend = {};
char dev_node[32] = "/dev/video0";
std::string camera_fmt_str = "UYVY";
std::string output_fmt_str = "BGRA32";
uint cam_w = 1920;
uint cam_h = 1080;
// View mode: true = front view (1), false = rear view (0).
bool MODEL = true;
uint64_t timestampbefore[8] = {0};
uint64_t LinuxGetFrameTimeBefore[8] = {0};

// SIGINT handler: request a clean shutdown of the main loop.
void handler(int)
{
    std::cout << "will exit..." << std::endl;
    exitRequested = true;
}
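// Returns the current wall-clock time as a string. The formats below follow the
// switch cases in the function:
//   time_stamp_type == 0 : "YYYY-MM-DD HH:MM:SS"
//   time_stamp_type == 1 : appends ":milliseconds"
//   time_stamp_type == 2 : appends ":milliseconds:microseconds"
//   time_stamp_type == 3 : appends ":milliseconds:microseconds:nanoseconds"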
std::string GetCurrentTimeStamp(int time_stamp_type = 0)
{
    std::chrono::system_clock::time_point now = std::chrono::system_clock::now();
    std::time_t now_time_t = std::chrono::system_clock::to_time_t(now);
    std::tm *now_tm = std::localtime(&now_time_t);
    char buffer[128];
    strftime(buffer, sizeof(buffer), "%F %T", now_tm);
    std::ostringstream ss;
    ss.fill('0');
    std::chrono::milliseconds ms;
    std::chrono::microseconds cs;
    std::chrono::nanoseconds ns;
    switch (time_stamp_type)
    {
    case 0:
        ss << buffer;
        break;
    case 1:
        ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()) % 1000;
        ss << buffer << ":" << ms.count();
        break;
    case 2:
        ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()) % 1000;
        cs = std::chrono::duration_cast<std::chrono::microseconds>(now.time_since_epoch()) % 1000000;
        ss << buffer << ":" << ms.count() << ":" << cs.count() % 1000;
        break;
    case 3:
        ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()) % 1000;
        cs = std::chrono::duration_cast<std::chrono::microseconds>(now.time_since_epoch()) % 1000000;
        ns = std::chrono::duration_cast<std::chrono::nanoseconds>(now.time_since_epoch()) % 1000000000;
        ss << buffer << ":" << ms.count() << ":" << cs.count() % 1000 << ":" << ns.count() % 1000;
        break;
    default:
        ss << buffer;
        break;
    }
    return ss.str();
}
//------------- Front view (F) layout, source frames 1920 x 1080
cv::Mat meger;
uint meger_width = 2020;
uint meger_height = 970;
// Per-camera output sizes (width, height pairs) for cameras 0..5.
int f_frame_size[] = {1280, 640, 1280, 640, 640, 360, 640, 360, 640, 320, 640, 320};
cv::Rect f_rectCenter = cv::Rect(370, 330, f_frame_size[0], f_frame_size[1]);             // front frame_0
cv::Rect f_rectCenterUp = cv::Rect(650, 0, f_frame_size[2] - 560, f_frame_size[3] - 320); // back frame_1
// cv::Rect frame1_rect = cv::Rect(160, 90, f_frame_size[2] - 320, f_frame_size[3] - 180);
cv::Rect f_rectLeftDown = cv::Rect(0, 330, f_frame_size[5], f_frame_size[4]); // left frame_2
// cv::Rect frame2_rect = cv::Rect(60, 0, f_frame_size[4] - 120, f_frame_size[5]);
cv::Rect f_rectRightDown = cv::Rect(1660, 330, f_frame_size[7], f_frame_size[6]); // right frame_3
// cv::Rect frame3_rect = cv::Rect(60, 0, f_frame_size[6] - 120, f_frame_size[7]);
// cv::Rect f_rectLeftUp = cv::Rect(0, 0, f_frame_size[8], f_frame_size[9]);      // chabao left frame_4
// cv::Rect f_rectRightUp = cv::Rect(1380, 0, f_frame_size[10], f_frame_size[11]); // chabao right frame_5
cv::Rect f_rectLeftUp = cv::Rect(1380, 0, f_frame_size[8], f_frame_size[9]);  // chabao left frame_4
cv::Rect f_rectRightUp = cv::Rect(0, 0, f_frame_size[10], f_frame_size[11]);  // chabao right frame_5
cv::Mat frame_0, frame_1, frame_2, frame_3, frame_4, frame_5;
bool is_save = false;
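// Compose the front-view mosaic: convert each BGRA camera frame to RGB, resize or
// rotate the rear and side views, and copy every frame into its rectangle of the
// shared `meger` canvas. Camera index mapping (taken from the code below):
//   0 front (center), 1 back (top strip, resized), 2 left (rotated 90° CW),
//   3 right (rotated 90° CCW), 4/5 auxiliary "chabao" cameras (top corners).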
cv::Mat fCmeraMeger(cv::Mat outMat[])
{
    for (uint32_t i = 0; i < camera_num; i++)
    {
        switch (i)
        {
        case 0:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                cv::imwrite("/home/nvidia/newdisk/hkpc/1.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_0 = outMat[i];
            break;
        case 1:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                cv::imwrite("/home/nvidia/newdisk/hkpc/2.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_1 = outMat[i];
            resize(frame_1, frame_1, cv::Size(720, 320));
            break;
        case 2:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                // cv::imwrite("/home/nvidia/newdisk/hkpc/3.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_2 = outMat[i];
            // ROTATE_90_CLOCKWISE
            // ROTATE_180
            // ROTATE_90_COUNTERCLOCKWISE
            cv::rotate(frame_2, frame_2, cv::ROTATE_90_CLOCKWISE);
            // cv::rotate(frame_2, frame_2, cv::ROTATE_90_COUNTERCLOCKWISE);
            break;
        case 3:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                // cv::imwrite("/home/nvidia/newdisk/hkpc/4.jpg", outMat[i]);
                is_save = false;
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_3 = outMat[i];
            // cv::rotate(frame_3, frame_3, cv::ROTATE_90_CLOCKWISE);
            cv::rotate(frame_3, frame_3, cv::ROTATE_90_COUNTERCLOCKWISE);
            break;
        case 4:
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_4 = outMat[i];
            break;
        case 5:
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_5 = outMat[i];
            break;
        default:
            break;
        }
    }
    // cv::Mat f_meger = cv::Mat::zeros(meger_height, meger_width, frame_0.type());
    frame_0.copyTo(meger(f_rectCenter));
    frame_1.copyTo(meger(f_rectCenterUp));
    frame_2.copyTo(meger(f_rectLeftDown));
    frame_3.copyTo(meger(f_rectRightDown));
    frame_4.copyTo(meger(f_rectLeftUp));
    frame_5.copyTo(meger(f_rectRightUp));
    return meger;
}
// uint meger_width = 1060;
// uint meger_height = 550;
//------------- Rear view (R) layout, 1902 * 852 // front_0 back_1 left_2 right_3 cha1_4 cha2_5
int r_frame_size[] = {1280, 640, 1280, 640, 640, 360, 640, 360, 640, 320, 640, 320};
cv::Rect r_rectCenterUp = cv::Rect(650, 0, r_frame_size[0] - 560, r_frame_size[1] - 320); // front frame_0
// cv::Rect r_frame0_rect = cv::Rect(120, 90, r_frame_size[0] - 240, r_frame_size[1] - 180);
cv::Rect r_rectCenter = cv::Rect(370, 330, r_frame_size[2], r_frame_size[3]);     // back frame_1
cv::Rect r_rectRightDown = cv::Rect(1660, 330, r_frame_size[5], r_frame_size[4]); // left frame_2
// cv::Rect r_frame2_rect = cv::Rect(60, 0, r_frame_size[4] - 120, r_frame_size[5]);
cv::Rect r_rectLeftDown = cv::Rect(0, 330, r_frame_size[7], r_frame_size[6]); // right frame_3
// cv::Rect r_frame3_rect = cv::Rect(60, 0, r_frame_size[6] - 120, r_frame_size[7]);
cv::Rect r_rectRightUp = cv::Rect(1380, 0, r_frame_size[8], r_frame_size[9]); // chabao left frame_4
cv::Rect r_rectLeftUp = cv::Rect(0, 0, r_frame_size[10], r_frame_size[11]);   // chabao right frame_5
cv::Mat r_frame_0, r_frame_1, r_frame_2, r_frame_3, r_frame_4, r_frame_5;
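// Compose the rear-view mosaic: same per-camera processing as fCmeraMeger, but the
// rear camera (index 1) takes the large center slot, the front camera (index 0) is
// shrunk into the top strip, and the left/right and auxiliary views swap sides.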
cv::Mat rCmeraMeger(cv::Mat outMat[])
{
    for (uint32_t i = 0; i < camera_num; i++)
    {
        switch (i)
        {
        case 0:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                cv::imwrite("/home/nvidia/newdisk/hkpc/1.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_0 = outMat[i];
            resize(r_frame_0, r_frame_0, cv::Size(720, 320));
            break;
        case 1:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                cv::imwrite("/home/nvidia/newdisk/hkpc/2.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_1 = outMat[i];
            break;
        case 2:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                // cv::imwrite("/home/nvidia/newdisk/hkpc/3.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_2 = outMat[i];
            cv::rotate(r_frame_2, r_frame_2, cv::ROTATE_90_CLOCKWISE);
            break;
        case 3:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                // cv::imwrite("/home/nvidia/newdisk/hkpc/4.jpg", outMat[i]);
                is_save = false;
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_3 = outMat[i];
            cv::rotate(r_frame_3, r_frame_3, cv::ROTATE_90_COUNTERCLOCKWISE);
            break;
        case 4:
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_4 = outMat[i];
            break;
        case 5:
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_5 = outMat[i];
            break;
        default:
            break;
        }
    }
    // cv::Mat f_meger = cv::Mat::zeros(meger_height, meger_width, frame_0.type());
    r_frame_0.copyTo(meger(r_rectCenterUp));
    r_frame_1.copyTo(meger(r_rectCenter));
    r_frame_2.copyTo(meger(r_rectRightDown));
    r_frame_3.copyTo(meger(r_rectLeftDown));
    r_frame_4.copyTo(meger(r_rectRightUp));
    r_frame_5.copyTo(meger(r_rectLeftUp));
    return meger;
}
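// Mode switching: a detached worker thread polls /tmp/vedioFront and reads a single
// character from it; '1' selects the front-view layout, '0' the rear-view layout
// (this interpretation follows `current_model = c - '0'` and the MODEL flag above).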
// Worker thread state
std::ifstream inFile;
char c;
bool current_model = true;

void thread_1()
{
    while (1)
    {
        // sleep() takes whole seconds, so sleep(0.1) truncated to 0; poll every 100 ms instead.
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
        try
        {
            std::ifstream inFile("/tmp/vedioFront", std::ios::in | std::ios::binary);
            if (!inFile)
            {
                // std::cout << "error" << std::endl;
                // return 0;
            }
            else
            {
                inFile.get(c);
                inFile.close();
                current_model = c - '0';
            }
        }
        catch (const std::exception &e)
        {
            std::cerr << e.what() << '\n';
        }
    }
}
/* Record per-frame capture timestamps. When the interval between timestamps does not
   match the configured frame rate, the timing details are appended to
   /tmp/cameras_sdk_demo_video<N>.log, with a separate log file per device. */
void CheckTimeStampLog(uint64_t timestamp, uint8_t camera_no)
{
    uint64_t FrameInterval = 0;
    char buffer[256] = {0};
    uint64_t LinuxFrameInterval{};
    struct timeval cur_time;
    uint64_t LinuxGetFrameTime{};
    uint64_t time_interval{};
    uint64_t FrameTransferDelay{};
    FILE *file_diff = NULL;
    char file_name[100] = {0};
    if (0 == timestamp)
    {
        /* Camera data is not available while the camera is still starting up. */
        return;
    }
    FrameInterval = timestamp - timestampbefore[camera_no];
    timestampbefore[camera_no] = timestamp;
    gettimeofday(&cur_time, NULL);
    LinuxGetFrameTime = cur_time.tv_sec * 1000000000 + cur_time.tv_usec * 1000;
    LinuxFrameInterval = LinuxGetFrameTime - LinuxGetFrameTimeBefore[camera_no];
    LinuxGetFrameTimeBefore[camera_no] = LinuxGetFrameTime;
    FrameTransferDelay = LinuxGetFrameTime - timestamp;
    if (stCameraCfgSend.sync_freq != 0)
        time_interval = 1000000000 / stCameraCfgSend.sync_freq;
    else
    {
        time_interval = 1000000000 / stCameraCfgSend.async_freq;
    }
    /* Flag frames whose interval deviates from the expected period by more than 12 ms. */
    if (((FrameInterval > (time_interval + 12000000) || FrameInterval < (time_interval - 12000000))) && (FrameInterval != 0))
    {
        sprintf(file_name, "/tmp/cameras_sdk_demo_video%d.log", camera_no);
        file_diff = fopen(file_name, "a+");
        sprintf(buffer, "%s Timestamp : %ld FrameInterval : %ld FrameTransferDelay : %ld LinuxGetFrameTime : %ld LinuxFrameInterval : %ld\n",
                GetCurrentTimeStamp(1).c_str(), timestamp, FrameInterval, FrameTransferDelay, LinuxGetFrameTime, LinuxFrameInterval);
        fwrite(buffer, sizeof(char), strlen(buffer), file_diff);
        fflush(file_diff);
        fclose(file_diff);
    }
    /* getenv() may return NULL when CHECK_TIME is unset; guard before atoi(). */
    const char *check_time = getenv("CHECK_TIME");
    if (check_time && atoi(check_time))
    {
        printf("%s Timestamp : %ld FrameInterval : %ld FrameTransferDelay : %ld LinuxGetFrameTime : %ld LinuxFrameInterval : %ld\n",
               GetCurrentTimeStamp(1).c_str(), timestamp, FrameInterval, FrameTransferDelay, LinuxGetFrameTime, LinuxFrameInterval);
    }
}
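/* Note (assumption, not stated in this source): VIDEO_OUT (/dev/video8) must already
 * exist as a writable V4L2 output device before this program starts. One common way
 * to provide such a device is the v4l2loopback kernel module, e.g.:
 *     sudo modprobe v4l2loopback video_nr=8
 */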
/* Main entry point: configure the V4L2 output device, initialize the GMSL cameras,
   then repeatedly grab frames via GetImageCvMat, compose the mosaic for the active
   view mode and write it to VIDEO_OUT. */
int main(int argc, char *argv[])
{
    // Open the output device.
    int output = open(VIDEO_OUT, O_RDWR);
    if (output < 0)
    {
        std::cerr << "ERROR: could not open output device!\n"
                  << strerror(errno);
        return -2;
    }
    // Configure the format of the output device.
    struct v4l2_format vid_format;
    memset(&vid_format, 0, sizeof(vid_format));
    vid_format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    if (ioctl(output, VIDIOC_G_FMT, &vid_format) < 0)
    {
        std::cerr << "ERROR: unable to get video format!\n"
                  << strerror(errno);
        return -1;
    }
    // Output RGB24 frames of the mosaic size to /dev/video8.
    size_t framesize = meger_width * meger_height * 3;
    vid_format.fmt.pix.width = meger_width;
    vid_format.fmt.pix.height = meger_height;
    vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
    vid_format.fmt.pix.sizeimage = framesize;
    vid_format.fmt.pix.field = V4L2_FIELD_NONE;
    if (ioctl(output, VIDIOC_S_FMT, &vid_format) < 0)
    {
        std::cerr << "ERROR: unable to set video format!\n"
                  << strerror(errno);
        return -1;
    }
    // Camera configuration: synchronized capture at 30 fps.
    camera_context_t ctx[8] = {};
    stCameraCfgSend.async_camera_num = 0;
    stCameraCfgSend.async_freq = 0;
    stCameraCfgSend.async_camera_bit_draw = 0;
    stCameraCfgSend.sync_camera_num = 8;
    stCameraCfgSend.sync_freq = 30;
    stCameraCfgSend.sync_camera_bit_draw = 0xff;
    // setenv("CHECK_TIME","0",0);
    char dev_node_tmp = dev_node[10];
    for (uint i = 0; i < camera_num; i++)
    {
        dev_node[10] = dev_node_tmp + i; // /dev/video0 .. /dev/video5
        ctx[i].dev_node = dev_node;
        ctx[i].camera_fmt_str = camera_fmt_str;
        ctx[i].output_fmt_str = output_fmt_str;
        ctx[i].cam_w = cam_w;
        ctx[i].cam_h = cam_h;
        ctx[i].out_w = f_frame_size[i * 2];
        ctx[i].out_h = f_frame_size[i * 2 + 1];
    }
    miivii::MvGmslCamera mvcam(ctx, camera_num, stCameraCfgSend);
    cv::Mat outMat[camera_num];
    uint8_t *outbuf[camera_num];
    cv::Mat imgbuf[camera_num];
    signal(SIGINT, &handler);
    bool quit = false;
    uint64_t timestamp;
    // Background image for the mosaic canvas.
    meger = cv::imread("./base.jpg");
    // imread() returns a 3-channel BGR image, so use the matching BGR-to-RGB code.
    cv::cvtColor(meger, meger, cv::COLOR_BGR2RGB);
    resize(meger, meger, cv::Size(meger_width, meger_height));
    uint8_t camera_no = dev_node[10] - 0x30;
    cv::Mat mege_frame;
    int count = 0;
    bool is_open_file = false;
    // Start the mode-switch file watcher.
    std::thread file_thread(thread_1);
    file_thread.detach();
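    // Main loop: grab one frame set from the cameras, rebuild the background if the
    // view mode changed, compose the mosaic, stamp the current time onto it and write
    // the RGB24 buffer to the V4L2 output device.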
    while (!quit)
    {
        if (exitRequested)
        {
            quit = true;
            break;
        }
        // count++;
        // if(count > 30){
        //     is_save = true;
        //     count = 0;
        // }
        auto start = std::chrono::high_resolution_clock::now();
        /* Use cv::Mat data to get the images. */
        if (mvcam.GetImageCvMat(outMat, timestamp, camera_no))
        {
            // for (uint32_t i = 0; i < camera_num; i++) {
            //     CheckTimeStampLog(timestamp, dev_node_tmp - 0x30 + i);
            // }
            auto end = std::chrono::high_resolution_clock::now();
            auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
            if (duration.count() > 10)
            {
                std::cout << "run more than 10 ms" << std::endl;
                break;
            }
            // View mode changed: reload the background canvas.
            if (current_model != MODEL)
            {
                meger = cv::imread("./base.jpg");
                cv::cvtColor(meger, meger, cv::COLOR_BGR2RGB);
                resize(meger, meger, cv::Size(meger_width, meger_height));
                MODEL = current_model;
            }
            if (MODEL)
            {
                mege_frame = fCmeraMeger(outMat);
            }
            else
            {
                mege_frame = rCmeraMeger(outMat);
            }
            cv::putText(mege_frame, GetCurrentTimeStamp(0), cv::Point(870, 25), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(255, 255, 255), 2);
            // Write the frame to the output device. write() returns -1 on failure, so the
            // result must be signed (ssize_t) for the error check below to work.
            ssize_t written = write(output, mege_frame.data, framesize);
            if (written < 0)
            {
                std::cerr << "ERROR: could not write to output device!\n";
                close(output);
                break;
            }
        }
        else
        {
            std::cerr << "Can't get image from camera." << std::endl;
        }
        if (cv::waitKey(1) == 27)
        {
            break;
        }
        // sleep() takes whole seconds, so sleep(0.002) truncated to 0; wait 2 ms instead.
        std::this_thread::sleep_for(std::chrono::milliseconds(2));
    }
    close(output);
    return 0;
}
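/* Build sketch (assumptions, not taken from this source): OpenCV 4 resolved through
 * pkg-config, and the MIIVII GMSL camera SDK providing MvGmslCamera.h plus its
 * library (the -lmvgmslcamera name below is a placeholder for whatever the SDK ships):
 *
 *     g++ -std=c++14 main.cpp -o cameras_mosaic \
 *         $(pkg-config --cflags --libs opencv4) \
 *         -lmvgmslcamera -lpthread
 */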